Merge branch 'bedrock_streaming' into bedrock_streaming_integration_testing
stephmilovic committed Nov 15, 2023
2 parents 4f95837 + 6875c63 commit 9ab90b4
Showing 5 changed files with 42 additions and 1 deletion.
10 changes: 9 additions & 1 deletion x-pack/plugins/actions/server/lib/gen_ai_token_tracking.ts
@@ -18,7 +18,15 @@ interface OwnProps {
result: ActionTypeExecutorRawResult<unknown>;
validatedParams: Record<string, unknown>;
}

/**
* Calculates the total, prompt, and completion token counts from different types of responses.
* It handles both streamed and non-streamed responses from OpenAI and Bedrock.
* It returns null if it cannot calculate the token counts.
* @param actionTypeId the action type id
* @param logger the logger
* @param result the result from the action executor
* @param validatedParams the validated params from the action executor
*/
export const getGenAiTokenTracking = async ({
actionTypeId,
logger,
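The rest of the dispatcher body is not shown in this diff. Below is a minimal sketch of the logic the doc comment describes; the connector type id, sub action names, payload shapes, return field names, and import paths are all assumptions for illustration, not the committed code.

import type { Readable } from 'stream';
// Helper import paths are hypothetical; the helpers themselves are documented
// in the other files changed by this commit.
import { getTokenCountFromBedrockInvoke } from './get_token_count_from_bedrock_invoke';
import { getTokenCountFromInvokeStream } from './get_token_count_from_invoke_stream';

interface TokenCounts {
  total_tokens: number;
  prompt_tokens: number;
  completion_tokens: number;
}

// Sketch only: dispatch to a token-counting helper based on the connector
// type and whether the response was streamed; return null when counting fails.
export const getGenAiTokenTrackingSketch = async ({
  actionTypeId,
  logger,
  result,
  validatedParams,
}: {
  actionTypeId: string;
  logger: { error: (msg: string) => void };
  result: { data?: unknown };
  validatedParams: Record<string, unknown>;
}): Promise<TokenCounts | null> => {
  try {
    // Streamed responses (OpenAI or Bedrock): count tokens from the stream.
    if (validatedParams.subAction === 'invokeStream') {
      const { total, prompt, completion } = await getTokenCountFromInvokeStream({
        responseStream: result.data as Readable,
        body: validatedParams.subActionParams as {
          messages: Array<{ role: string; content: string }>;
        },
      });
      return { total_tokens: total, prompt_tokens: prompt, completion_tokens: completion };
    }

    // Non-streamed Bedrock `run`/`test` responses: count tokens locally.
    if (actionTypeId === '.bedrock') {
      const { total, prompt, completion } = await getTokenCountFromBedrockInvoke({
        response: (result.data as { completion: string }).completion,
        body: (validatedParams.subActionParams as { body: string }).body,
      });
      return { total_tokens: total, prompt_tokens: prompt, completion_tokens: completion };
    }

    // Other cases (e.g. non-streamed OpenAI responses that report their own
    // usage) are out of scope for this sketch.
    return null;
  } catch (e) {
    logger.error('Failed to calculate token counts for generative AI usage tracking');
    return null;
  }
};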
@@ -10,6 +10,14 @@ import { encode } from 'gpt-tokenizer';
export interface InvokeBody {
prompt: string;
}

/**
* Takes the Bedrock `run` and `test` sub action response and the request prompt as inputs.
* Uses gpt-tokenizer encoding to calculate the number of tokens in the prompt and completion.
* Returns an object containing the total, prompt, and completion token counts.
* @param response - the response completion from the `run` or `test` sub action
* @param body - the stringified request prompt
*/
export async function getTokenCountFromBedrockInvoke({
response,
body,
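The function body is collapsed in this diff. A minimal sketch of the counting it describes follows, assuming the body is a stringified JSON object with a `prompt` field (as the `InvokeBody` interface suggests) and that the returned field names are `total`, `prompt`, and `completion`.

import { encode } from 'gpt-tokenizer';

// Sketch: count prompt tokens from the request body and completion tokens
// from the response text, then sum them.
export async function getTokenCountFromBedrockInvoke({
  response,
  body,
}: {
  response: string;
  body: string;
}): Promise<{ total: number; prompt: number; completion: number }> {
  const { prompt } = JSON.parse(body) as { prompt: string };
  const promptTokens = encode(prompt).length;
  const completionTokens = encode(response).length;
  return {
    prompt: promptTokens,
    completion: completionTokens,
    total: promptTokens + completionTokens,
  };
}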
@@ -15,6 +15,14 @@ export interface InvokeBody {
content: string;
}>;
}

/**
* Takes the OpenAI and Bedrock `invokeStream` sub action response stream and the request messages array as inputs.
* Uses gpt-tokenizer encoding to calculate the number of tokens in the prompt and completion parts of the response stream.
* Returns an object containing the total, prompt, and completion token counts.
* @param responseStream the response stream from the `invokeStream` sub action
* @param body the request messages array
*/
export async function getTokenCountFromInvokeStream({
responseStream,
body,
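The stream handling itself is collapsed in this diff. A simplified sketch follows; it assumes the provider-specific chunk framing (SSE for OpenAI, the AWS event stream for Bedrock) has already been reduced to plain text chunks, which the real implementation has to do itself, and that the returned field names match the Bedrock helper above.

import { encode } from 'gpt-tokenizer';
import type { Readable } from 'stream';

// Sketch: prompt tokens come from the request messages, completion tokens
// from the concatenated stream chunks.
export async function getTokenCountFromInvokeStream({
  responseStream,
  body,
}: {
  responseStream: Readable;
  body: { messages: Array<{ role: string; content: string }> };
}): Promise<{ total: number; prompt: number; completion: number }> {
  const prompt = body.messages.map((message) => message.content).join('\n');

  let completion = '';
  for await (const chunk of responseStream) {
    completion += chunk.toString();
  }

  const promptTokens = encode(prompt).length;
  const completionTokens = encode(completion).length;
  return {
    prompt: promptTokens,
    completion: completionTokens,
    total: promptTokens + completionTokens,
  };
}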
@@ -224,6 +224,11 @@ const formatBedrockBody = ({
};
};

/**
* Takes in a readable stream of data and returns a Transform stream that
* uses the AWS event stream codec to parse the proprietary Bedrock response into
* a string of the response text alone, pushing the response string downstream.
*/
const transformToString = () =>
new Transform({
transform(chunk, encoding, callback) {
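The transform body is collapsed in this diff. Here is a sketch of the decoding the doc comment describes, assuming each chunk is a single AWS event stream frame whose payload carries a base64 `bytes` field wrapping a JSON object with a `completion` field; frame buffering across chunk boundaries is ignored, and the payload field names are assumptions rather than the committed code.

import { Transform } from 'stream';
import { EventStreamCodec } from '@smithy/eventstream-codec';
import { fromUtf8, toUtf8 } from '@smithy/util-utf8';

// Sketch: decode the AWS event stream frame, unwrap the base64 payload, and
// push only the completion text downstream.
const transformToString = () =>
  new Transform({
    transform(chunk, encoding, callback) {
      try {
        const codec = new EventStreamCodec(toUtf8, fromUtf8);
        const event = codec.decode(chunk);
        const payload = JSON.parse(new TextDecoder().decode(event.body)) as { bytes?: string };
        let completion = '';
        if (payload.bytes) {
          const decoded = JSON.parse(Buffer.from(payload.bytes, 'base64').toString('utf8')) as {
            completion?: string;
          };
          completion = decoded.completion ?? '';
        }
        callback(null, completion);
      } catch (err) {
        callback(err as Error);
      }
    },
  });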
@@ -192,6 +192,12 @@ export class OpenAIConnector extends SubActionConnector<Config, Secrets> {
return { available: response.success };
}

/**
* Invokes the streamApi method with the provided body and the stream parameter set to true.
* Returns a Transform stream that processes the streamApi response and emits only the
* response text.
* @param body - the OpenAI Invoke request body
*/
public async invokeStream(body: InvokeAIActionParams): Promise<Transform> {
const res = (await this.streamApi({
body: JSON.stringify(body),
@@ -222,6 +228,12 @@ export class OpenAIConnector extends SubActionConnector<Config, Secrets> {
};
}
}
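The remainder of invokeStream is collapsed above. This is a standalone sketch of the pattern its doc comment describes; streamApi and transformToString are passed in as stand-ins for the connector's own members rather than copied from the committed code.

import type { IncomingMessage } from 'http';
import type { Transform } from 'stream';

// Sketch: request the streaming variant of the API and pipe the raw HTTP
// response through transformToString so callers only receive the response text.
async function invokeStreamSketch(
  streamApi: (args: { body: string; stream: boolean }) => Promise<IncomingMessage>,
  transformToString: () => Transform,
  body: Record<string, unknown>
): Promise<Transform> {
  const res = await streamApi({ body: JSON.stringify(body), stream: true });
  return res.pipe(transformToString());
}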

/**
* Takes in a readable stream of data and returns a Transform stream that
* parses the proprietary OpenAI response into a string of the response text alone,
* pushing the response string downstream.
*/
const transformToString = () =>
new Transform({
transform(chunk, encoding, callback) {
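The transform body is collapsed in this diff. Below is a simplified sketch of the parsing its doc comment describes, assuming the standard OpenAI chat-completions SSE format (`data: {...}` lines carrying a `choices[0].delta.content` field); lines split across chunk boundaries are not handled here.

import { Transform } from 'stream';

// Sketch: split each chunk into SSE `data:` lines, JSON-parse them, and push
// only the delta text downstream.
const transformToString = () =>
  new Transform({
    transform(chunk, encoding, callback) {
      const lines = chunk
        .toString()
        .split('\n')
        .filter((line: string) => line.startsWith('data: ') && !line.includes('[DONE]'));

      let text = '';
      for (const line of lines) {
        try {
          const parsed = JSON.parse(line.replace(/^data: /, ''));
          text += parsed.choices?.[0]?.delta?.content ?? '';
        } catch {
          // Ignore partial JSON caused by a chunk boundary in this simplified sketch.
        }
      }
      callback(null, text);
    },
  });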
