[Security solution] Improve AI connector error handling #167674

Merged: 12 commits, Oct 2, 2023
27 changes: 16 additions & 11 deletions x-pack/packages/kbn-elastic-assistant/impl/assistant/api.tsx
@@ -61,19 +61,24 @@ export const fetchConnectorExecuteAction = async ({
? `/internal/elastic_assistant/actions/connector/${apiConfig?.connectorId}/_execute`
: `/api/actions/connector/${apiConfig?.connectorId}/_execute`;

const response = await http.fetch<{ connector_id: string; status: string; data: string }>(
path,
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(requestBody),
signal,
}
);
const response = await http.fetch<{
connector_id: string;
status: string;
data: string;
service_message?: string;
}>(path, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(requestBody),
signal,
});

if (response.status !== 'ok' || !response.data) {
if (response.service_message) {
return `${API_ERROR} \n\n${response.service_message}`;
}
return API_ERROR;
}

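For orientation, a minimal sketch (not part of the diff) of how the updated check above turns an errored _execute response into the message shown in the assistant; API_ERROR stands in for the constant api.tsx already imports, and its text here is an assumption.

// Illustrative only: mirrors the new error branch above.
const API_ERROR = 'An error occurred sending your message.'; // assumed placeholder text

interface ExecuteResponse {
  connector_id: string;
  status: string;
  data: string;
  service_message?: string;
}

const toAssistantMessage = (response: ExecuteResponse): string => {
  if (response.status !== 'ok' || !response.data) {
    // When the action framework reports an error, surface the connector's
    // service_message (e.g. 'Unauthorized API Error - The api key was invalid.')
    // underneath the generic error text.
    return response.service_message
      ? `${API_ERROR} \n\n${response.service_message}`
      : API_ERROR;
  }
  return response.data;
};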
@@ -26,7 +26,7 @@ beforeAll(() => {
describe('actionTypeRegistry.get() works', () => {
test('connector type static data is as expected', () => {
expect(actionTypeModel.id).toEqual(ACTION_TYPE_ID);
expect(actionTypeModel.selectMessage).toBe('Send a request to AWS Bedrock systems.');
expect(actionTypeModel.selectMessage).toBe('Send a request to AWS Bedrock.');
expect(actionTypeModel.actionTypeTitle).toBe('AWS Bedrock');
});
});
@@ -21,7 +21,7 @@ export function getConnectorType(): BedrockConnector {
id: BEDROCK_CONNECTOR_ID,
iconClass: lazy(() => import('./logo')),
selectMessage: i18n.translate('xpack.stackConnectors.components.bedrock.selectMessageText', {
defaultMessage: 'Send a request to AWS Bedrock systems.',
defaultMessage: 'Send a request to AWS Bedrock.',
Contributor (author) comment: The commit got lost for these couple of translations from the renaming PR, so I am tucking them in here; hope that is okay.

}),
actionTypeTitle: BEDROCK_TITLE,
validateParams: async (
@@ -26,7 +26,7 @@ beforeAll(() => {
describe('actionTypeRegistry.get() works', () => {
test('connector type static data is as expected', () => {
expect(actionTypeModel.id).toEqual(ACTION_TYPE_ID);
expect(actionTypeModel.selectMessage).toBe('Send a request to OpenAI systems.');
expect(actionTypeModel.selectMessage).toBe(
'Send a request to an OpenAI or Azure OpenAI service.'
);
expect(actionTypeModel.actionTypeTitle).toBe('OpenAI');
});
});
@@ -21,7 +21,7 @@ export function getConnectorType(): OpenAIConnector {
id: OPENAI_CONNECTOR_ID,
iconClass: lazy(() => import('./logo')),
selectMessage: i18n.translate('xpack.stackConnectors.components.genAi.selectMessageText', {
defaultMessage: 'Send a request to OpenAI systems.',
defaultMessage: 'Send a request to an OpenAI or Azure OpenAI service.',
}),
actionTypeTitle: OPENAI_TITLE,
validateParams: async (
@@ -16,6 +16,7 @@ import {
DEFAULT_BEDROCK_URL,
} from '../../../common/bedrock/constants';
import { DEFAULT_BODY } from '../../../public/connector_types/bedrock/constants';
import { AxiosError } from 'axios';

jest.mock('aws4', () => ({
sign: () => ({ signed: true }),
@@ -151,5 +152,55 @@ describe('BedrockConnector', () => {
await expect(connector.invokeAI(aiAssistantBody)).rejects.toThrow('API Error');
});
});
describe('getResponseErrorMessage', () => {
it('returns an unknown error message', () => {
// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage({})).toEqual(
`Unexpected API Error: - Unknown error`
);
});

it('returns the error.message', () => {
// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage({ message: 'a message' })).toEqual(
`Unexpected API Error: - a message`
);
});

it('returns the error.response.data.message', () => {
const err = {
response: {
headers: {},
status: 404,
statusText: 'Resource Not Found',
data: {
message: 'Resource not found',
},
},
} as AxiosError<{ message?: string }>;
expect(
// @ts-expect-error expects an axios error as the parameter
connector.getResponseErrorMessage(err)
).toEqual(`API Error: Resource Not Found - Resource not found`);
});

it('returns authorization error', () => {
const err = {
response: {
headers: {},
status: 401,
statusText: 'Auth error',
data: {
message: 'The api key was invalid.',
},
},
} as AxiosError<{ message?: string }>;

// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage(err)).toEqual(
`Unauthorized API Error - The api key was invalid.`
);
});
});
});
});
@@ -63,15 +63,17 @@ export class BedrockConnector extends SubActionConnector<Config, Secrets> {
});
}

protected getResponseErrorMessage(error: AxiosError<{ error?: { message?: string } }>): string {
protected getResponseErrorMessage(error: AxiosError<{ message?: string }>): string {
if (!error.response?.status) {
return `Unexpected API Error: ${error.code} - ${error.message}`;
return `Unexpected API Error: ${error.code ?? ''} - ${error.message ?? 'Unknown error'}`;
}
if (error.response.status === 401) {
return 'Unauthorized API Error';
return `Unauthorized API Error${
error.response?.data?.message ? ` - ${error.response.data.message}` : ''
}`;
}
return `API Error: ${error.response?.status} - ${error.response?.statusText}${
error.response?.data?.error?.message ? ` - ${error.response.data.error?.message}` : ''
return `API Error: ${error.response?.statusText}${
error.response?.data?.message ? ` - ${error.response.data.message}` : ''
}`;
}

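As a side note, a hedged sketch of how the string built by getResponseErrorMessage ends up in the service_message asserted by the functional tests further down: the sub-action framework (behavior inferred from those assertions, not shown in this diff) prefixes it with the HTTP status code.

// Illustrative composition only; the wrapping happens in the actions framework,
// not in this file. The format is inferred from the FTR expectations below.
const toServiceMessage = (statusCode: number, formatted: string): string =>
  `Status code: ${statusCode}. Message: ${formatted}`;

// e.g. a 422 from Bedrock:
// toServiceMessage(422, 'API Error: Unprocessable Entity - Malformed input request: ...')
// => 'Status code: 422. Message: API Error: Unprocessable Entity - Malformed input request: ...'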
@@ -5,6 +5,7 @@
* 2.0.
*/

import { AxiosError } from 'axios';
import { OpenAIConnector } from './openai';
import { actionsConfigMock } from '@kbn/actions-plugin/server/actions_config.mock';
import {
@@ -282,6 +283,60 @@ describe('OpenAIConnector', () => {
await expect(connector.invokeAI(sampleOpenAiBody)).rejects.toThrow('API Error');
});
});
describe('getResponseErrorMessage', () => {
it('returns an unknown error message', () => {
// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage({})).toEqual(
`Unexpected API Error: - Unknown error`
);
});

it('returns the error.message', () => {
// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage({ message: 'a message' })).toEqual(
`Unexpected API Error: - a message`
);
});

it('returns the error.response.data.error.message', () => {
const err = {
response: {
headers: {},
status: 404,
statusText: 'Resource Not Found',
data: {
error: {
message: 'Resource not found',
},
},
},
} as AxiosError<{ error?: { message?: string } }>;
expect(
// @ts-expect-error expects an axios error as the parameter
connector.getResponseErrorMessage(err)
).toEqual(`API Error: Resource Not Found - Resource not found`);
});

it('returns authorization error', () => {
const err = {
response: {
headers: {},
status: 401,
statusText: 'Auth error',
data: {
error: {
message: 'The api key was invalid.',
},
},
},
} as AxiosError<{ error?: { message?: string } }>;

// @ts-expect-error expects an axios error as the parameter
expect(connector.getResponseErrorMessage(err)).toEqual(
`Unauthorized API Error - The api key was invalid.`
);
});
});
});

describe('AzureAI', () => {
@@ -86,12 +86,14 @@ export class OpenAIConnector extends SubActionConnector<Config, Secrets> {

protected getResponseErrorMessage(error: AxiosError<{ error?: { message?: string } }>): string {
if (!error.response?.status) {
return `Unexpected API Error: ${error.code} - ${error.message}`;
return `Unexpected API Error: ${error.code ?? ''} - ${error.message ?? 'Unknown error'}`;
}
if (error.response.status === 401) {
return 'Unauthorized API Error';
return `Unauthorized API Error${
error.response?.data?.error?.message ? ` - ${error.response.data.error?.message}` : ''
}`;
}
return `API Error: ${error.response?.status} - ${error.response?.statusText}${
return `API Error: ${error.response?.statusText}${
error.response?.data?.error?.message ? ` - ${error.response.data.error?.message}` : ''
}`;
}
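As a quick comparison, a minimal sketch of the error body shapes the two connectors now type against; these interfaces are illustrative, derived from the AxiosError type parameters and unit tests in this diff rather than from provider documentation.

// Bedrock reports the message at the top level of the error body.
interface BedrockErrorBody {
  message?: string; // e.g. 'Malformed input request: extraneous key [...] is not permitted'
}

// OpenAI and Azure OpenAI nest it under `error`, hence the different type
// parameter and the extra `.error` hop in the formatter above.
interface OpenAIErrorBody {
  error?: {
    message?: string; // e.g. 'The model `bad model` does not exist'
  };
}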
@@ -193,8 +195,6 @@ export class OpenAIConnector extends SubActionConnector<Config, Secrets> {
return result;
}

// TO DO: Pass actual error
// tracked here https://github.com/elastic/security-team/issues/7373
return 'An error occurred sending your message. If the problem persists, please test the connector configuration.';
return 'An error occurred sending your message. If the problem persists, please test the connector configuration. \n\nAPI Error: The response from OpenAI was in an unrecognized format.';
}
}
@@ -444,6 +444,35 @@ export default function bedrockTest({ getService }: FtrProviderContext) {
retry: false,
});
});

it('should return an error when an error occurs', async () => {
const DEFAULT_BODY = {
prompt: `Hello world!`,
max_tokens_to_sample: 300,
stop_sequences: ['\n\nHuman:'],
};
const { body } = await supertest
.post(`/api/actions/connector/${bedrockActionId}/_execute`)
.set('kbn-xsrf', 'foo')
.send({
params: {
subAction: 'test',
subActionParams: {
body: JSON.stringify(DEFAULT_BODY),
},
},
})
.expect(200);

expect(body).to.eql({
status: 'error',
connector_id: bedrockActionId,
message: 'an error occurred while running the action',
retry: true,
service_message:
'Status code: 422. Message: API Error: Unprocessable Entity - Malformed input request: extraneous key [ooooo] is not permitted, please reformat your input and try again.',
});
});
});
});
});
@@ -46,7 +46,7 @@ export default function genAiTest({ getService }: FtrProviderContext) {
return body.id;
};

describe('GenAi', () => {
describe('OpenAI', () => {
after(() => {
objectRemover.removeAll();
});
@@ -463,6 +463,30 @@
retry: false,
});
});

it('should return an error when an error occurs', async () => {
const { body } = await supertest
.post(`/api/actions/connector/${genAiActionId}/_execute`)
.set('kbn-xsrf', 'foo')
.send({
params: {
subAction: 'test',
subActionParams: {
body: '{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"Hello world"}]}',
},
},
})
.expect(200);

expect(body).to.eql({
status: 'error',
connector_id: genAiActionId,
message: 'an error occurred while running the action',
retry: true,
service_message:
'Status code: 422. Message: API Error: Unprocessable Entity - The model `bad model` does not exist',
});
});
});
});
});
@@ -179,9 +179,9 @@ export default function ApiTest({ getService }: FtrProviderContext) {
connectorId,
functions: [],
});

expect(response.body.message).to.contain(
`400 - Bad Request - This model's maximum context length is 8192 tokens. However, your messages resulted in 11036 tokens. Please reduce the length of the messages.`
`an error occurred while running the action - Status code: 400. Message: API Error: Bad Request - This model's maximum context length is 8192 tokens. However, your messages resulted in 11036 tokens. Please reduce the length of the messages.`
);
});
