
Commit 2a20e0a

sorenlouv, viduni94, and kibanamachine authored and committed
[Obs AI Assistant] Remove semantic_text migration (elastic#220886)
## Background

The semantic text migration was added in 8.17 along with the move to the semantic_text field (elastic#186499).

## Suggestion

We should remove the semantic_text migration starting in 9.1. Reasons:

- The migration has recently caused severe disruption on a number of clusters (elastic#220255).
- The migration may no longer be needed. Users are required to upgrade Elasticsearch to 8.18 in order to upgrade to 9.0, or to 8.19 to upgrade to 9.1. This could mean that users are guaranteed to have already run the migration by the time they upgrade Kibana to 9.1 (needs to be double-checked with the Kibana folks).

---------

Co-authored-by: Viduni Wickramarachchi <[email protected]>
Co-authored-by: kibanamachine <[email protected]>
1 parent 0ed4131 commit 2a20e0a
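Note for readers skimming the diffs below: besides deleting the `populateMissingSemanticTextFieldWithLock` startup migration, the commit reworks `getInferenceIdFromWriteIndex` so that a missing `inference_id` is logged and reported as `undefined` rather than thrown and caught at every call site. The following TypeScript sketch only illustrates that caller-side pattern; it reuses the helper's signature from the diff, but `exampleCaller` is a hypothetical function, not code from the commit.

```ts
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import type { Logger } from '@kbn/logging';

// Signature as changed in this commit: the helper now takes a logger and resolves
// to `undefined` instead of rejecting when the write index has no inference_id.
declare function getInferenceIdFromWriteIndex(
  esClient: { asInternalUser: ElasticsearchClient },
  logger: Logger
): Promise<string | undefined>;

// Hypothetical caller, shown only to illustrate the new guard-on-undefined pattern.
async function exampleCaller(
  esClient: { asInternalUser: ElasticsearchClient },
  logger: Logger
) {
  // Before this commit, call sites wrapped the helper in `.catch(() => undefined)`
  // (or fell back to a default inference ID) because it threw on a missing mapping.
  const inferenceId = await getInferenceIdFromWriteIndex(esClient, logger);
  if (!inferenceId) {
    // The write index has no semantic_text inference_id yet; nothing to update.
    return;
  }
  // ...proceed with knowledge base index setup using `inferenceId`.
}
```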

File tree

17 files changed: +113, -360 lines changed


x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/welcome_message_knowledge_base.test.tsx

Lines changed: 0 additions & 6 deletions
@@ -83,7 +83,6 @@ describe('WelcomeMessageKnowledgeBase', () => {
       status: {
         value: {
           enabled: true,
-          endpoint: { inference_id: 'inference_id' },
           kbState: KnowledgeBaseState.DEPLOYING_MODEL,
           concreteWriteIndex: 'my-index',
           currentInferenceId: 'inference_id',
@@ -130,7 +129,6 @@ describe('WelcomeMessageKnowledgeBase', () => {
         value: {
           ...kb.status.value,
           enabled: true,
-          endpoint: { inference_id: 'inference_id' },
           kbState: KnowledgeBaseState.READY,
           concreteWriteIndex: 'my-index',
           currentInferenceId: 'inference_id',
@@ -156,7 +154,6 @@ describe('WelcomeMessageKnowledgeBase', () => {
       status: {
         value: {
           enabled: true,
-          endpoint: { inference_id: 'inference_id' },
           kbState: KnowledgeBaseState.DEPLOYING_MODEL,
           concreteWriteIndex: 'my-index',
           currentInferenceId: 'inference_id',
@@ -190,7 +187,6 @@ describe('WelcomeMessageKnowledgeBase', () => {
       status: {
         value: {
           enabled: true,
-          endpoint: { inference_id: 'inference_id' },
           kbState: KnowledgeBaseState.ERROR,
           concreteWriteIndex: 'my-index',
           currentInferenceId: 'inference_id',
@@ -226,7 +222,6 @@ describe('WelcomeMessageKnowledgeBase', () => {
       status: {
         value: {
           enabled: true,
-          endpoint: { inference_id: 'inference_id' },
           kbState: KnowledgeBaseState.DEPLOYING_MODEL,
           concreteWriteIndex: 'my-index',
           currentInferenceId: 'inference_id',
@@ -258,7 +253,6 @@ describe('WelcomeMessageKnowledgeBase', () => {
       status: {
         value: {
           kbState: KnowledgeBaseState.READY,
-          endpoint: { inference_id: 'inference_id' },
           enabled: true,
           concreteWriteIndex: 'my-index',
           currentInferenceId: 'inference_id',

x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/knowledge_base/route.ts

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ const getKnowledgeBaseStatus = createObservabilityAIAssistantServerRoute({
   ): Promise<{
     errorMessage?: string;
     enabled: boolean;
-    endpoint?: Partial<InferenceInferenceEndpointInfo>;
+    endpoint?: InferenceInferenceEndpointInfo;
     modelStats?: Partial<MlTrainedModelStats>;
     kbState: KnowledgeBaseState;
     currentInferenceId?: string | undefined;

x-pack/platform/plugins/shared/observability_ai_assistant/server/service/client/index.ts

Lines changed: 9 additions & 15 deletions
@@ -70,7 +70,6 @@ import { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
 import { ObservabilityAIAssistantConfig } from '../../config';
 import { waitForKbModel, warmupModel } from '../inference_endpoint';
 import { reIndexKnowledgeBaseWithLock } from '../knowledge_base_service/reindex_knowledge_base';
-import { populateMissingSemanticTextFieldWithLock } from '../startup_migrations/populate_missing_semantic_text_fields';
 import { createOrUpdateKnowledgeBaseIndexAssets } from '../index_assets/create_or_update_knowledge_base_index_assets';
 import { getInferenceIdFromWriteIndex } from '../knowledge_base_service/get_inference_id_from_write_index';

@@ -675,16 +674,15 @@ export class ObservabilityAIAssistantClient {

     logger.debug(`Setting up knowledge base with inference_id: ${nextInferenceId}`);

-    const currentInferenceId = await getInferenceIdFromWriteIndex(esClient).catch(() => {
-      logger.debug(
-        `Current KB write index does not have an inference_id. This is to be expected for indices created before 8.16`
-      );
-      return undefined;
-    });
-
+    const currentInferenceId = await getInferenceIdFromWriteIndex(esClient, logger);
     if (currentInferenceId === nextInferenceId) {
       logger.debug('Inference ID is unchanged. No need to re-index knowledge base.');
-      warmupModel({ esClient, logger, inferenceId: nextInferenceId }).catch(() => {});
+      const warmupModelPromise = warmupModel({ esClient, logger, inferenceId: nextInferenceId });
+      if (waitUntilComplete) {
+        logger.debug('Waiting for warmup to complete...');
+        await warmupModelPromise;
+        logger.debug('Warmup completed.');
+      }
       return { reindex: false, currentInferenceId, nextInferenceId };
     }

@@ -711,12 +709,6 @@ export class ObservabilityAIAssistantClient {
           logger,
           esClient,
         });
-        await populateMissingSemanticTextFieldWithLock({
-          core,
-          logger,
-          config: this.dependencies.config,
-          esClient: this.dependencies.esClient,
-        });
       })
       .catch((e) => {
         if (isLockAcquisitionError(e)) {

@@ -730,7 +722,9 @@ export class ObservabilityAIAssistantClient {
     });

     if (waitUntilComplete) {
+      logger.debug('Waiting for knowledge base setup to complete...');
       await kbSetupPromise;
+      logger.debug('Knowledge base setup completed.');
     }

     return { reindex: true, currentInferenceId, nextInferenceId };

x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/create_or_update_conversation_index_assets.ts

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ export async function createOrUpdateConversationIndexAssets({
   core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
 }) {
   try {
-    logger.debug('Setting up index assets');
+    logger.debug('Setting up conversation index assets');
     const [coreStart] = await core.getStartServices();
     const { asInternalUser } = coreStart.elasticsearch.client;
x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/create_or_update_knowledge_base_index_assets.ts

Lines changed: 8 additions & 4 deletions
@@ -53,15 +53,15 @@ export async function createOrUpdateKnowledgeBaseIndexAssets({
     },
   });

-  const writeIndexInferenceId = await getInferenceIdFromWriteIndex(esClient).catch(
-    () => undefined
-  );
-  logger.debug(`Current write index inference id: ${writeIndexInferenceId}`);
+  const writeIndexInferenceId = await getInferenceIdFromWriteIndex(esClient, logger);

   // Knowledge base: write index
   // `createConcreteWriteIndex` will create the write index, or update the index mappings if the index already exists
   // only invoke `createConcreteWriteIndex` if the write index does not exist or the inferenceId in the component template is the same as the one in the write index
   if (!writeIndexInferenceId || writeIndexInferenceId === componentTemplateInferenceId) {
+    logger.debug(
+      `Creating or updating mappings for knowledge base write index. Inference ID: ${componentTemplateInferenceId}`
+    );
     const kbAliasName = resourceNames.writeIndexAlias.kb;
     await createConcreteWriteIndex({
       esClient: asInternalUser,

@@ -76,6 +76,10 @@ export async function createOrUpdateKnowledgeBaseIndexAssets({
       },
       dataStreamAdapter: getDataStreamAdapter({ useDataStreamForAlerts: false }),
     });
+  } else {
+    logger.debug(
+      `Knowledge base write index already exists with a different inference ID (${writeIndexInferenceId}) than the inference ID in the component template (${componentTemplateInferenceId}). Skipping update.`
+    );
   }

   logger.info('Successfully set up knowledge base index assets');

x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/update_existing_index_assets.ts

Lines changed: 4 additions & 9 deletions
@@ -6,7 +6,6 @@
  */

 import type { CoreSetup, Logger } from '@kbn/core/server';
-import { ELSER_ON_ML_NODE_INFERENCE_ID } from '../../../common';
 import type { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
 import { createOrUpdateConversationIndexAssets } from './create_or_update_conversation_index_assets';
 import { createOrUpdateKnowledgeBaseIndexAssets } from './create_or_update_knowledge_base_index_assets';

@@ -43,13 +42,9 @@ export async function updateExistingIndexAssets({
   if (doesKbIndexExist) {
     logger.debug('Found index for knowledge base. Updating index assets.');

-    const currentInferenceId = await getInferenceIdFromWriteIndex(esClient).catch(() => {
-      logger.debug(
-        `Current KB write index does not have an inference_id. This is to be expected for indices created before 8.16`
-      );
-      return ELSER_ON_ML_NODE_INFERENCE_ID;
-    });
-
-    await createOrUpdateKnowledgeBaseIndexAssets({ logger, core, inferenceId: currentInferenceId });
+    const inferenceId = await getInferenceIdFromWriteIndex(esClient, logger);
+    if (inferenceId) {
+      await createOrUpdateKnowledgeBaseIndexAssets({ logger, core, inferenceId });
+    }
   }
 }

x-pack/platform/plugins/shared/observability_ai_assistant/server/service/inference_endpoint.ts

Lines changed: 20 additions & 11 deletions
@@ -107,23 +107,22 @@ export async function getKbModelStatus({
   isReIndexing: boolean;
 }> {
   const enabled = config.enableKnowledgeBase;
-  const concreteWriteIndex = await getConcreteWriteIndex(esClient);
+  const concreteWriteIndex = await getConcreteWriteIndex(esClient, logger);
   const isReIndexing = await isReIndexInProgress({ esClient, logger, core });
+  const currentInferenceId = await getInferenceIdFromWriteIndex(esClient, logger);

-  const currentInferenceId = await getInferenceIdFromWriteIndex(esClient).catch(() => undefined);
   if (!inferenceId) {
     if (!currentInferenceId) {
-      logger.error('Inference id not provided and not found in write index');
       return {
         enabled,
-        errorMessage: 'Inference id not found',
+        errorMessage: 'Inference ID not found in write index',
+        currentInferenceId: undefined,
         kbState: KnowledgeBaseState.NOT_INSTALLED,
         concreteWriteIndex,
         isReIndexing,
       };
     }

-    logger.debug(`Using current inference id "${currentInferenceId}" from write index`);
     inferenceId = currentInferenceId;
   }

@@ -226,8 +225,8 @@ export async function getKbModelStatus({
     enabled,
     modelStats,
     kbState,
-    currentInferenceId,
     concreteWriteIndex,
+    currentInferenceId,
     isReIndexing,
   };
 }

@@ -245,17 +244,27 @@ export async function waitForKbModel({
   config: ObservabilityAIAssistantConfig;
   inferenceId: string;
 }) {
+  logger.debug(`Waiting for knowledge base model to be ready for inference ID "${inferenceId}" !!`);
+
   // Run a dummy inference to trigger the model to deploy
   // This is a workaround for the fact that the model may not be deployed yet
   await warmupModel({ esClient, logger, inferenceId }).catch(() => {});

   return pRetry(
     async () => {
-      const { kbState } = await getKbModelStatus({ core, esClient, logger, config, inferenceId });
+      logger.debug(`Checking knowledge base model status for inference ID "${inferenceId}"`);
+      const { kbState } = await getKbModelStatus({
+        core,
+        esClient,
+        logger,
+        config,
+        inferenceId,
+      });

       if (kbState !== KnowledgeBaseState.READY) {
-        logger.debug('Knowledge base model is not yet ready. Retrying...');
-        throw new Error('Knowledge base model is not yet ready');
+        const message = `Knowledge base model is not yet ready. kbState = ${kbState}, `;
+        logger.debug(message);
+        throw new Error(message);
       }

       logger.debug('Knowledge base model is ready.');

@@ -273,7 +282,7 @@ export async function warmupModel({
   logger: Logger;
   inferenceId: string;
 }) {
-  logger.debug(`Running inference to trigger model deployment for "${inferenceId}"`);
+  logger.debug(`Warming up model for "${inferenceId}"`);
   await pRetry(
     () =>
       esClient.asInternalUser.inference.inference({

@@ -282,6 +291,6 @@ export async function warmupModel({
     }),
     { retries: 10 }
   ).catch((error) => {
-    logger.error(`Unable to run inference on endpoint "${inferenceId}": ${error.message}`);
+    logger.error(`Unable to warm up model for "${inferenceId}": ${error.message}`);
   });
 }

x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/get_inference_id_from_write_index.ts

Lines changed: 33 additions & 15 deletions
@@ -8,36 +8,54 @@
 import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
 import { MappingSemanticTextProperty } from '@elastic/elasticsearch/lib/api/types';
 import { first } from 'lodash';
+import { Logger } from '@kbn/logging';
 import { resourceNames } from '..';

-export async function getConcreteWriteIndex(esClient: { asInternalUser: ElasticsearchClient }) {
+export async function getConcreteWriteIndex(
+  esClient: { asInternalUser: ElasticsearchClient },
+  logger: Logger
+) {
   try {
     const res = await esClient.asInternalUser.indices.getAlias({
       name: resourceNames.writeIndexAlias.kb,
     });

     return first(Object.keys(res));
   } catch (error) {
+    logger.debug(
+      `Unable to get concrete write index for index alias "${resourceNames.writeIndexAlias.kb}": ${error}`
+    );
     return;
   }
 }

-export async function getInferenceIdFromWriteIndex(esClient: {
-  asInternalUser: ElasticsearchClient;
-}): Promise<string> {
-  const response = await esClient.asInternalUser.indices.getMapping({
-    index: resourceNames.writeIndexAlias.kb,
-  });
+export async function getInferenceIdFromWriteIndex(
+  esClient: {
+    asInternalUser: ElasticsearchClient;
+  },
+  logger: Logger
+): Promise<string | undefined> {
+  try {
+    const response = await esClient.asInternalUser.indices.getMapping({
+      index: resourceNames.writeIndexAlias.kb,
+    });

-  const [indexName, indexMappings] = Object.entries(response)[0];
+    const [indexName, indexMappings] = Object.entries(response)[0];

-  const inferenceId = (
-    indexMappings.mappings?.properties?.semantic_text as MappingSemanticTextProperty
-  )?.inference_id;
+    const inferenceId = (
+      indexMappings.mappings?.properties?.semantic_text as MappingSemanticTextProperty
+    )?.inference_id;

-  if (!inferenceId) {
-    throw new Error(`inference_id not found in field mappings for index ${indexName}`);
-  }
+    if (!inferenceId) {
+      logger.debug(`Inference ID missing in field mappings for index ${indexName}.`);
+      return undefined;
+    }

-  return inferenceId;
+    return inferenceId;
+  } catch (error) {
+    logger.debug(
+      `Unable to get index mapping for index alias "${resourceNames.writeIndexAlias.kb}": ${error}`
+    );
+    return undefined;
+  }
 }
