Skip to content

Commit dc5f2ec

Browse files
[Search] Playground sends telemetry headers with completion requests (#221292)
## Summary Playground now sends the X-Elastic-Product-Use-Case header with a `search_playground` value for telemetry purposes. ### Checklist Check the PR satisfies following conditions. Reviewers should verify this PR satisfies this list as well. - [x] Any text added follows [EUI's writing guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses sentence case text and includes [i18n support](https://github.com/elastic/kibana/blob/main/src/platform/packages/shared/kbn-i18n/README.md) - [x] [Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html) was added for features that require explanation or tutorials - [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios - [x] If a plugin configuration key changed, check if it needs to be allowlisted in the cloud and added to the [docker list](https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker) - [x] This was checked for breaking HTTP API changes, and any breaking changes have been approved by the breaking-change committee. The `release_note:breaking` label should be applied in these situations. - [x] [Flaky Test Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was used on any tests changed - [x] The PR description includes the appropriate Release Notes section, and the correct `release_note:*` label is applied per the [guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process) ### Identify risks Does this PR introduce any risks? For example, consider risks like hard to test bugs, performance regression, potential of data loss. Describe the risk, its severity, and mitigation for each identified risk. Invite stakeholders and evaluate how to proceed before merging. 
- [ ] [See some risk examples](https://github.com/elastic/kibana/blob/main/RISK_MATRIX.mdx) - [ ] ... --------- Co-authored-by: kibanamachine <[email protected]>
1 parent a7b69f6 commit dc5f2ec

File tree

3 files changed

+33
-0
lines changed

3 files changed

+33
-0
lines changed

x-pack/platform/packages/shared/ai-infra/inference-langchain/src/chat_model/inference_chat_model.test.ts

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -93,6 +93,14 @@ const createChunkEvent = (input: ChunkEventInput): ChatCompletionChunkEvent => {
9393
}
9494
};
9595

96+
const telemetryMetadata = {
97+
pluginId: 'plugin-id',
98+
};
99+
100+
const metadata = {
101+
connectorTelemetry: telemetryMetadata,
102+
};
103+
96104
describe('InferenceChatModel', () => {
97105
let chatComplete: ChatCompleteAPI & jest.MockedFn<ChatCompleteAPI>;
98106
let connector: InferenceConnector;
@@ -107,6 +115,7 @@ describe('InferenceChatModel', () => {
107115
const chatModel = new InferenceChatModel({
108116
chatComplete,
109117
connector,
118+
telemetryMetadata,
110119
});
111120

112121
const response = createResponse({ content: 'dummy' });
@@ -124,13 +133,15 @@ describe('InferenceChatModel', () => {
124133
},
125134
],
126135
stream: false,
136+
metadata,
127137
});
128138
});
129139

130140
it('converts a complete conversation call', async () => {
131141
const chatModel = new InferenceChatModel({
132142
chatComplete,
133143
connector,
144+
telemetryMetadata,
134145
});
135146

136147
const response = createResponse({ content: 'dummy' });
@@ -170,13 +181,15 @@ describe('InferenceChatModel', () => {
170181
},
171182
],
172183
stream: false,
184+
metadata,
173185
});
174186
});
175187

176188
it('converts a tool call conversation', async () => {
177189
const chatModel = new InferenceChatModel({
178190
chatComplete,
179191
connector,
192+
telemetryMetadata,
180193
});
181194

182195
const response = createResponse({ content: 'dummy' });
@@ -247,13 +260,15 @@ describe('InferenceChatModel', () => {
247260
},
248261
],
249262
stream: false,
263+
metadata,
250264
});
251265
});
252266

253267
it('converts tools', async () => {
254268
const chatModel = new InferenceChatModel({
255269
chatComplete,
256270
connector,
271+
telemetryMetadata,
257272
});
258273

259274
const response = createResponse({ content: 'dummy' });
@@ -308,6 +323,7 @@ describe('InferenceChatModel', () => {
308323
},
309324
},
310325
stream: false,
326+
metadata,
311327
});
312328
});
313329

@@ -320,6 +336,7 @@ describe('InferenceChatModel', () => {
320336
model: 'super-duper-model',
321337
functionCallingMode: 'simulated',
322338
signal: abortCtrl.signal,
339+
telemetryMetadata,
323340
});
324341

325342
const response = createResponse({ content: 'dummy' });
@@ -336,6 +353,7 @@ describe('InferenceChatModel', () => {
336353
modelName: 'super-duper-model',
337354
abortSignal: abortCtrl.signal,
338355
stream: false,
356+
metadata,
339357
});
340358
});
341359

@@ -370,6 +388,9 @@ describe('InferenceChatModel', () => {
370388
modelName: 'some-other-model',
371389
abortSignal: abortCtrl.signal,
372390
stream: false,
391+
metadata: {
392+
connectorTelemetry: undefined,
393+
},
373394
});
374395
});
375396
});
@@ -685,6 +706,7 @@ describe('InferenceChatModel', () => {
685706
const chatModel = new InferenceChatModel({
686707
chatComplete,
687708
connector,
709+
telemetryMetadata,
688710
});
689711

690712
const response = createResponse({ content: 'dummy' });
@@ -736,6 +758,7 @@ describe('InferenceChatModel', () => {
736758
},
737759
},
738760
stream: false,
761+
metadata,
739762
});
740763
});
741764
});
@@ -746,6 +769,7 @@ describe('InferenceChatModel', () => {
746769
chatComplete,
747770
connector,
748771
model: 'my-super-model',
772+
telemetryMetadata,
749773
});
750774

751775
const identifyingParams = chatModel.identifyingParams();
@@ -754,6 +778,7 @@ describe('InferenceChatModel', () => {
754778
connectorId: 'connector-id',
755779
modelName: 'my-super-model',
756780
model_name: 'my-super-model',
781+
metadata,
757782
});
758783
});
759784
});
@@ -792,6 +817,7 @@ describe('InferenceChatModel', () => {
792817
const chatModel = new InferenceChatModel({
793818
chatComplete,
794819
connector,
820+
telemetryMetadata,
795821
});
796822

797823
const structuredOutputModel = chatModel.withStructuredOutput(
@@ -858,6 +884,7 @@ describe('InferenceChatModel', () => {
858884
},
859885
},
860886
stream: false,
887+
metadata,
861888
});
862889
});
863890

x-pack/platform/packages/shared/ai-infra/inference-langchain/src/chat_model/inference_chat_model.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@ import {
4242
isToolValidationError,
4343
getConnectorDefaultModel,
4444
getConnectorProvider,
45+
ConnectorTelemetryMetadata,
4546
} from '@kbn/inference-common';
4647
import type { ToolChoice } from './types';
4748
import { toAsyncIterator, wrapInferenceError } from './utils';
@@ -63,6 +64,7 @@ export interface InferenceChatModelParams extends BaseChatModelParams {
6364
temperature?: number;
6465
model?: string;
6566
signal?: AbortSignal;
67+
telemetryMetadata?: ConnectorTelemetryMetadata;
6668
}
6769

6870
export interface InferenceChatModelCallOptions extends BaseChatModelCallOptions {
@@ -94,6 +96,7 @@ export class InferenceChatModel extends BaseChatModel<InferenceChatModelCallOpti
9496
private readonly connector: InferenceConnector;
9597
// @ts-ignore unused for now
9698
private readonly logger: Logger;
99+
private readonly telemetryMetadata?: ConnectorTelemetryMetadata;
97100

98101
protected temperature?: number;
99102
protected functionCallingMode?: FunctionCallingMode;
@@ -104,6 +107,7 @@ export class InferenceChatModel extends BaseChatModel<InferenceChatModelCallOpti
104107
super(args);
105108
this.chatComplete = args.chatComplete;
106109
this.connector = args.connector;
110+
this.telemetryMetadata = args.telemetryMetadata;
107111

108112
this.temperature = args.temperature;
109113
this.functionCallingMode = args.functionCallingMode;
@@ -183,6 +187,7 @@ export class InferenceChatModel extends BaseChatModel<InferenceChatModelCallOpti
183187
tools: options.tools ? toolDefinitionToInference(options.tools) : undefined,
184188
toolChoice: options.tool_choice ? toolChoiceToInference(options.tool_choice) : undefined,
185189
abortSignal: options.signal ?? this.signal,
190+
metadata: { connectorTelemetry: this.telemetryMetadata },
186191
};
187192
}
188193

x-pack/solutions/search/plugins/search_playground/server/lib/get_chat_params.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,7 @@ export const getChatParams = async (
8686
// prevents the agent from retrying on failure
8787
// failure could be due to bad connector, we should deliver that result to the client asap
8888
maxRetries: 0,
89+
telemetryMetadata: { pluginId: 'search_playground' }, // hard-coded because the pluginId is not snake cased and the telemetry expects snake case
8990
},
9091
});
9192

0 commit comments

Comments (0)