
Commit

Merge branch 'main' into index_management-disable_actions_for_serverless2
sabarasaba committed Jul 12, 2023
2 parents aea52a5 + d166193 commit 3210251
Showing 146 changed files with 4,156 additions and 2,700 deletions.
2 changes: 1 addition & 1 deletion package.json
@@ -931,7 +931,7 @@
"react-fast-compare": "^2.0.4",
"react-focus-on": "^3.7.0",
"react-grid-layout": "^1.3.4",
"react-hook-form": "^7.43.2",
"react-hook-form": "^7.44.2",
"react-intl": "^2.8.0",
"react-is": "^17.0.2",
"react-markdown": "^6.0.3",
15 changes: 12 additions & 3 deletions packages/core/root/core-root-server-internal/src/bootstrap.ts
@@ -49,6 +49,9 @@ export async function bootstrap({ configs, cliArgs, applyConfigOverrides }: Boot

const root = new Root(rawConfigService, env, onRootShutdown);
const cliLogger = root.logger.get('cli');
const rootLogger = root.logger.get('root');

rootLogger.info('Kibana is starting');

cliLogger.debug('Kibana configurations evaluated in this order: ' + env.configs.join(', '));

@@ -77,8 +80,14 @@ export async function bootstrap({ configs, cliArgs, applyConfigOverrides }: Boot
cliLogger.info(`Reloaded Kibana configuration (reason: ${reason}).`, { tags: ['config'] });
}

process.on('SIGINT', () => shutdown());
process.on('SIGTERM', () => shutdown());
process.on('SIGINT', () => {
rootLogger.info('SIGINT received - initiating shutdown');
shutdown();
});
process.on('SIGTERM', () => {
rootLogger.info('SIGTERM received - initiating shutdown');
shutdown();
});

function shutdown(reason?: Error) {
rawConfigService.stop();
@@ -96,7 +105,7 @@ export async function bootstrap({ configs, cliArgs, applyConfigOverrides }: Boot
}

if (isSetupOnHold) {
root.logger.get().info('Holding setup until preboot stage is completed.');
rootLogger.info('Holding setup until preboot stage is completed.');
const { shouldReloadConfig } = await preboot.waitUntilCanSetup();
if (shouldReloadConfig) {
await reloadConfiguration('configuration might have changed during preboot stage');
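
For readers skimming the diff: the new handlers log which signal arrived before delegating to the existing shutdown(). A standalone sketch of the same pattern, under assumed names (shutdownServices and the shuttingDown guard are illustrative, not part of this commit):

// Minimal Node.js sketch: log the signal, shut down once, then exit.
let shuttingDown = false;

async function shutdownServices(): Promise<void> {
  // stop HTTP servers, flush loggers, close connections, etc.
}

function handleSignal(signal: NodeJS.Signals): void {
  if (shuttingDown) return; // ignore repeated signals while already shutting down
  shuttingDown = true;
  console.info(`${signal} received - initiating shutdown`);
  void shutdownServices().finally(() => process.exit(0));
}

process.on('SIGINT', handleSignal);
process.on('SIGTERM', handleSignal);
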
12 changes: 10 additions & 2 deletions packages/core/root/core-root-server-internal/src/root/index.ts
@@ -81,7 +81,7 @@ export class Root {
}

public async shutdown(reason?: any) {
this.log.debug('shutting root down');
this.log.info('Kibana is shutting down');

if (reason) {
if (reason.code === 'EADDRINUSE' && Number.isInteger(reason.port)) {
@@ -91,7 +91,7 @@
}

if (reason.code !== MIGRATION_EXCEPTION_CODE) {
this.log.fatal(reason);
this.log.fatal(formatShutdownReason(reason));
}
}

@@ -159,3 +159,11 @@ export class Root {
this.loggingConfigSubscription.add(connectSubscription);
}
}

const formatShutdownReason = (reason: any): string => {
let message = `Reason: ${reason.message ?? reason}`;
if (reason.stack) {
message = `${message}\n${reason.stack}`;
}
return message;
};
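
A rough illustration of what the helper above produces, assuming a plain Error is passed in (the exact stack text will differ):

// Illustrative usage of formatShutdownReason; expected output shown in comments.
console.log(formatShutdownReason(new Error('address already in use')));
// Reason: address already in use
// Error: address already in use
//     at <stack frames...>

console.log(formatShutdownReason('manual stop'));
// Reason: manual stop
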
@@ -82,7 +82,7 @@ describe('migration v2', () => {
expect(
records.find((rec) =>
rec.message.startsWith(
`Unable to complete saved object migrations for the [.kibana] index: While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.`
`Reason: Unable to complete saved object migrations for the [.kibana] index: While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.`
)
)
).toBeDefined();
@@ -1117,8 +1117,7 @@ describe('migration actions', () => {
});
});

// FLAKY: https://github.com/elastic/kibana/issues/160994
describe.skip('readWithPit', () => {
describe('readWithPit', () => {
it('requests documents from an index using given PIT', async () => {
const openPitTask = openPit({ client, index: 'existing_index_with_docs' });
const pitResponse = (await openPitTask()) as Either.Right<OpenPitResponse>;
@@ -1297,7 +1296,12 @@ describe('migration actions', () => {
const leftResponse = (await readWithPitTask()) as Either.Left<EsResponseTooLargeError>;

expect(leftResponse.left.type).toBe('es_response_too_large');
expect(leftResponse.left.contentLength).toBe(3184);
// ES response contains a field that indicates how long it took ES to get the response, e.g.: "took": 7
// if ES takes more than 9ms, the payload will be 1 byte bigger.
// see https://github.com/elastic/kibana/issues/160994
// Thus, the statements below account for response times up to 99ms
expect(leftResponse.left.contentLength).toBeGreaterThanOrEqual(3184);
expect(leftResponse.left.contentLength).toBeLessThanOrEqual(3185);
});

it('rejects if PIT does not exist', async () => {
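
The widened contentLength assertion above exists because the serialized ES response embeds a took duration, and a two-digit value adds one byte to the payload. A tiny sketch of the effect (the object shape here is simplified, not the real ES response):

// One extra digit in "took" means one extra byte in the serialized response.
const fastResponse = JSON.stringify({ took: 7, hits: { hits: [] } });
const slowResponse = JSON.stringify({ took: 17, hits: { hits: [] } });
console.log(slowResponse.length - fastResponse.length); // 1
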
3 changes: 2 additions & 1 deletion x-pack/packages/kbn-elastic-assistant/impl/assistant/api.tsx
@@ -11,6 +11,7 @@ import { HttpSetup } from '@kbn/core-http-browser';
import type { Message } from '../assistant_context/types';
import { Conversation } from '../assistant_context/types';
import { API_ERROR } from './translations';
import { MODEL_GPT_3_5_TURBO } from '../connectorland/models/model_selector/model_selector';

export interface FetchConnectorExecuteAction {
apiConfig: Conversation['apiConfig'];
@@ -33,7 +34,7 @@ export const fetchConnectorExecuteAction = async ({
const body =
apiConfig?.provider === OpenAiProviderType.OpenAi
? {
model: 'gpt-3.5-turbo',
model: apiConfig.model ?? MODEL_GPT_3_5_TURBO,
messages: outboundMessages,
n: 1,
stop: null,
@@ -60,7 +60,7 @@ export const AssistantOverlay = React.memo<Props>(({ isAssistantEnabled }) => {
const handleShortcutPress = useCallback(() => {
// Try to restore the last conversation on shortcut pressed
if (!isModalVisible) {
setConversationId(localStorageLastConversationId || WELCOME_CONVERSATION_TITLE);
setConversationId(localStorageLastConversationId ?? WELCOME_CONVERSATION_TITLE);
}

setIsModalVisible(!isModalVisible);
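
The switch from || to ?? above narrows when the fallback applies: ?? only falls back on null or undefined, while || would also replace an empty string or any other falsy value. A quick illustration, with a placeholder for the real welcome title:

const WELCOME = 'Welcome'; // placeholder for WELCOME_CONVERSATION_TITLE
const lastId: string | undefined = undefined;
console.log(lastId ?? WELCOME); // 'Welcome' - nothing stored yet
const emptyId = '';
console.log(emptyId || WELCOME); // 'Welcome' - || treats '' as missing
console.log(emptyId ?? WELCOME); // ''        - ?? keeps the empty string
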
@@ -11,6 +11,7 @@ export const TEST_IDS = {
ADD_SYSTEM_PROMPT: 'addSystemPrompt',
PROMPT_SUPERSELECT: 'promptSuperSelect',
CONVERSATIONS_MULTISELECTOR_OPTION: (id: string) => `conversationMultiSelectorOption-${id}`,
SETTINGS_MODAL: 'settingsModal',
SYSTEM_PROMPT_MODAL: {
ID: 'systemPromptModal',
PROMPT_TEXT: 'systemPromptModalPromptText',

This file was deleted.

@@ -20,20 +20,20 @@ import useEvent from 'react-use/lib/useEvent';
import { css } from '@emotion/react';

import { OpenAiProviderType } from '@kbn/stack-connectors-plugin/common/gen_ai/constants';
import { Conversation } from '../../..';
import { useAssistantContext } from '../../assistant_context';
import { Conversation } from '../../../..';
import { useAssistantContext } from '../../../assistant_context';
import * as i18n from './translations';
import { DEFAULT_CONVERSATION_TITLE } from '../use_conversation/translations';
import { useConversation } from '../use_conversation';
import { SystemPromptSelectorOption } from '../prompt_editor/system_prompt/system_prompt_modal/system_prompt_selector/system_prompt_selector';
import { DEFAULT_CONVERSATION_TITLE } from '../../use_conversation/translations';
import { useConversation } from '../../use_conversation';
import { SystemPromptSelectorOption } from '../../prompt_editor/system_prompt/system_prompt_modal/system_prompt_selector/system_prompt_selector';

const isMac = navigator.platform.toLowerCase().indexOf('mac') >= 0;

interface Props {
conversationId?: string;
defaultConnectorId?: string;
defaultProvider?: OpenAiProviderType;
onSelectionChange?: (value: string) => void;
selectedConversationId: string | undefined;
setSelectedConversationId: React.Dispatch<React.SetStateAction<string>>;
shouldDisableKeyboardShortcut?: () => boolean;
isDisabled?: boolean;
}
@@ -56,17 +56,16 @@ export type ConversationSelectorOption = EuiComboBoxOptionOption<{

export const ConversationSelector: React.FC<Props> = React.memo(
({
conversationId = DEFAULT_CONVERSATION_TITLE,
selectedConversationId = DEFAULT_CONVERSATION_TITLE,
defaultConnectorId,
defaultProvider,
onSelectionChange,
setSelectedConversationId,
shouldDisableKeyboardShortcut = () => false,
isDisabled = false,
}) => {
const { allSystemPrompts } = useAssistantContext();

const { deleteConversation, setConversation } = useConversation();
const [selectedConversationId, setSelectedConversationId] = useState<string>(conversationId);

const { conversations } = useAssistantContext();
const conversationIds = useMemo(() => Object.keys(conversations), [conversations]);
@@ -112,7 +111,13 @@ export const ConversationSelector: React.FC<Props> = React.memo(
}
setSelectedConversationId(searchValue);
},
[allSystemPrompts, defaultConnectorId, defaultProvider, setConversation]
[
allSystemPrompts,
defaultConnectorId,
defaultProvider,
setConversation,
setSelectedConversationId,
]
);

// Callback for when user deletes a conversation
@@ -124,32 +129,29 @@
setTimeout(() => {
deleteConversation(cId);
}, 0);
// onSystemPromptDeleted(cId);
},
[conversationIds, deleteConversation, selectedConversationId]
[conversationIds, deleteConversation, selectedConversationId, setSelectedConversationId]
);

const onChange = useCallback(
(newOptions: ConversationSelectorOption[]) => {
if (newOptions.length === 0) {
setSelectedOptions([]);
// handleSelectionChange([]);
} else if (conversationOptions.findIndex((o) => o.label === newOptions?.[0].label) !== -1) {
setSelectedConversationId(newOptions?.[0].label);
}
// setSelectedConversationId(value ?? DEFAULT_CONVERSATION_TITLE);
},
[conversationOptions]
[conversationOptions, setSelectedConversationId]
);

const onLeftArrowClick = useCallback(() => {
const prevId = getPreviousConversationId(conversationIds, selectedConversationId);
setSelectedConversationId(prevId);
}, [conversationIds, selectedConversationId]);
}, [conversationIds, selectedConversationId, setSelectedConversationId]);
const onRightArrowClick = useCallback(() => {
const nextId = getNextConversationId(conversationIds, selectedConversationId);
setSelectedConversationId(nextId);
}, [conversationIds, selectedConversationId]);
}, [conversationIds, selectedConversationId, setSelectedConversationId]);

// Register keyboard listener for quick conversation switching
const onKeyDown = useCallback(
@@ -186,9 +188,8 @@ export const ConversationSelector: React.FC<Props> = React.memo(
useEvent('keydown', onKeyDown);

useEffect(() => {
onSelectionChange?.(selectedConversationId);
setSelectedOptions(conversationOptions.filter((c) => c.label === selectedConversationId));
}, [conversationOptions, onSelectionChange, selectedConversationId]);
}, [conversationOptions, selectedConversationId]);

const renderOption: (
option: ConversationSelectorOption,
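
The net effect of the refactor above is that ConversationSelector becomes a controlled component: selectedConversationId now lives in the parent and arrives via props along with its setter, rather than being duplicated in local useState. A minimal sketch of the pattern (component and prop names are illustrative, not the actual Kibana API):

import React, { useState } from 'react';

interface SelectorProps {
  selectedId: string;
  setSelectedId: React.Dispatch<React.SetStateAction<string>>;
}

// The child never owns the selection; it only renders and updates the parent's state.
const Selector: React.FC<SelectorProps> = ({ selectedId, setSelectedId }) => (
  <button onClick={() => setSelectedId('Next conversation')}>{selectedId}</button>
);

export const Parent: React.FC = () => {
  const [selectedId, setSelectedId] = useState('Welcome');
  return <Selector selectedId={selectedId} setSelectedId={setSelectedId} />;
};
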
@@ -10,7 +10,7 @@ import { i18n } from '@kbn/i18n';
export const SELECTED_CONVERSATION_LABEL = i18n.translate(
'xpack.elasticAssistant.assistant.conversationSelector.defaultConversationTitle',
{
defaultMessage: 'Selected conversation',
defaultMessage: 'Conversations',
}
);


0 comments on commit 3210251
