[Search] Playground sends telemetry headers with completion requests #221292

Merged
@@ -93,6 +93,14 @@ const createChunkEvent = (input: ChunkEventInput): ChatCompletionChunkEvent => {
   }
 };

+const telemetryMetadata = {
+  pluginId: 'plugin-id',
+};
+
+const metadata = {
+  connectorTelemetry: telemetryMetadata,
+};
+
 describe('InferenceChatModel', () => {
   let chatComplete: ChatCompleteAPI & jest.MockedFn<ChatCompleteAPI>;
   let connector: InferenceConnector;
@@ -107,6 +115,7 @@ describe('InferenceChatModel', () => {
     const chatModel = new InferenceChatModel({
       chatComplete,
       connector,
+      telemetryMetadata,
     });

     const response = createResponse({ content: 'dummy' });
@@ -124,13 +133,15 @@ describe('InferenceChatModel', () => {
         },
       ],
       stream: false,
+      metadata,
     });
   });

   it('converts a complete conversation call', async () => {
     const chatModel = new InferenceChatModel({
       chatComplete,
       connector,
+      telemetryMetadata,
     });

     const response = createResponse({ content: 'dummy' });
@@ -170,13 +181,15 @@ describe('InferenceChatModel', () => {
         },
       ],
       stream: false,
+      metadata,
     });
   });

   it('converts a tool call conversation', async () => {
     const chatModel = new InferenceChatModel({
       chatComplete,
       connector,
+      telemetryMetadata,
     });

     const response = createResponse({ content: 'dummy' });
@@ -247,13 +260,15 @@ describe('InferenceChatModel', () => {
         },
       ],
       stream: false,
+      metadata,
     });
   });

   it('converts tools', async () => {
     const chatModel = new InferenceChatModel({
       chatComplete,
       connector,
+      telemetryMetadata,
     });

     const response = createResponse({ content: 'dummy' });
@@ -308,6 +323,7 @@ describe('InferenceChatModel', () => {
         },
       },
       stream: false,
+      metadata,
     });
   });

@@ -320,6 +336,7 @@ describe('InferenceChatModel', () => {
       model: 'super-duper-model',
       functionCallingMode: 'simulated',
       signal: abortCtrl.signal,
+      telemetryMetadata,
     });

     const response = createResponse({ content: 'dummy' });
@@ -336,6 +353,7 @@ describe('InferenceChatModel', () => {
       modelName: 'super-duper-model',
       abortSignal: abortCtrl.signal,
       stream: false,
+      metadata,
     });
   });

@@ -370,6 +388,9 @@ describe('InferenceChatModel', () => {
       modelName: 'some-other-model',
       abortSignal: abortCtrl.signal,
       stream: false,
+      metadata: {
+        connectorTelemetry: undefined,
+      },
     });
   });
 });
@@ -685,6 +706,7 @@ describe('InferenceChatModel', () => {
     const chatModel = new InferenceChatModel({
       chatComplete,
       connector,
+      telemetryMetadata,
     });

     const response = createResponse({ content: 'dummy' });
@@ -736,6 +758,7 @@ describe('InferenceChatModel', () => {
         },
       },
       stream: false,
+      metadata,
     });
   });
 });
@@ -746,6 +769,7 @@ describe('InferenceChatModel', () => {
       chatComplete,
       connector,
       model: 'my-super-model',
+      telemetryMetadata,
     });

     const identifyingParams = chatModel.identifyingParams();
@@ -754,6 +778,7 @@ describe('InferenceChatModel', () => {
       connectorId: 'connector-id',
       modelName: 'my-super-model',
       model_name: 'my-super-model',
+      metadata,
     });
   });
 });
@@ -792,6 +817,7 @@ describe('InferenceChatModel', () => {
     const chatModel = new InferenceChatModel({
       chatComplete,
       connector,
+      telemetryMetadata,
     });

     const structuredOutputModel = chatModel.withStructuredOutput(
@@ -858,6 +884,7 @@ describe('InferenceChatModel', () => {
         },
       },
       stream: false,
+      metadata,
     });
   });
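The test changes above all follow one pattern: after this PR, every `chatComplete` invocation is expected to carry a `metadata.connectorTelemetry` field, and `connectorTelemetry` is `undefined` when no `telemetryMetadata` was passed to the constructor. A minimal sketch of that assertion pattern, assuming the jest-mocked `chatComplete` from the test setup above:

// Sketch only: `chatComplete` is the jest mock declared in the describe block.
const telemetryMetadata = { pluginId: 'plugin-id' };

expect(chatComplete).toHaveBeenCalledWith(
  expect.objectContaining({
    metadata: { connectorTelemetry: telemetryMetadata },
  })
);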
--- next file ---

@@ -42,6 +42,7 @@ import {
   isToolValidationError,
   getConnectorDefaultModel,
   getConnectorProvider,
+  ConnectorTelemetryMetadata,
 } from '@kbn/inference-common';
 import type { ToolChoice } from './types';
 import { toAsyncIterator, wrapInferenceError } from './utils';
@@ -63,6 +64,7 @@ export interface InferenceChatModelParams extends BaseChatModelParams {
   temperature?: number;
   model?: string;
   signal?: AbortSignal;
+  telemetryMetadata?: ConnectorTelemetryMetadata;
 }

 export interface InferenceChatModelCallOptions extends BaseChatModelCallOptions {
@@ -94,6 +96,7 @@ export class InferenceChatModel extends BaseChatModel<InferenceChatModelCallOptions> {
   private readonly connector: InferenceConnector;
   // @ts-ignore unused for now
   private readonly logger: Logger;
+  private readonly telemetryMetadata?: ConnectorTelemetryMetadata;

   protected temperature?: number;
   protected functionCallingMode?: FunctionCallingMode;
@@ -104,6 +107,7 @@ export class InferenceChatModel extends BaseChatModel<InferenceChatModelCallOptions> {
     super(args);
     this.chatComplete = args.chatComplete;
     this.connector = args.connector;
+    this.telemetryMetadata = args.telemetryMetadata;

     this.temperature = args.temperature;
     this.functionCallingMode = args.functionCallingMode;
@@ -183,6 +187,7 @@ export class InferenceChatModel extends BaseChatModel<InferenceChatModelCallOptions> {
       tools: options.tools ? toolDefinitionToInference(options.tools) : undefined,
       toolChoice: options.tool_choice ? toolChoiceToInference(options.tool_choice) : undefined,
       abortSignal: options.signal ?? this.signal,
+      metadata: { connectorTelemetry: this.telemetryMetadata },
     };
   }

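Taken together, these hunks thread an optional `telemetryMetadata` constructor parameter through to the params of every chat completion call the model makes. A minimal usage sketch, assuming `chatComplete` and `connector` are obtained from the inference plugin; the plugin id here is illustrative:

// Sketch: `chatComplete` and `connector` are assumed to come from the
// inference plugin's contract; only `telemetryMetadata` is new in this PR.
const chatModel = new InferenceChatModel({
  chatComplete,
  connector,
  telemetryMetadata: { pluginId: 'my_plugin' },
});

// Every call the model issues now includes
// `metadata: { connectorTelemetry: { pluginId: 'my_plugin' } }`.
await chatModel.invoke('Hello');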
--- next file ---

@@ -86,6 +86,7 @@ export const getChatParams = async (
       // prevents the agent from retrying on failure
       // failure could be due to bad connector, we should deliver that result to the client asap
       maxRetries: 0,
+      telemetryMetadata: { pluginId: 'search_playground' }, // hard-coded because the pluginId is not snake cased and the telemetry expects snake case
     },
   });

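With this last hunk, every completion request issued by the Search Playground identifies itself via connector telemetry. Roughly, the `chatComplete` params produced for a Playground request would take this shape (field names from the diffs above; connector id and message content are placeholders):

// Illustrative shape only; values other than the metadata are placeholders.
const params = {
  connectorId: 'connector-id',
  messages: [{ role: 'user', content: 'What is in my index?' }],
  stream: false,
  metadata: {
    connectorTelemetry: { pluginId: 'search_playground' },
  },
};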