
Commit 7386dc5
feat: demo streaming in sample-code
ZhongpinWang committed Oct 25, 2024
1 parent c8611d8 commit 7386dc5
Showing 5 changed files with 36 additions and 19 deletions.
6 changes: 3 additions & 3 deletions packages/foundation-models/src/azure-openai/azure-openai-chat-completion-stream.ts
@@ -62,14 +62,14 @@ export class ChatCompletionStream extends Stream<any> {
       response!.finishReason = finishReason;
       switch (finishReason) {
         case 'content_filter':
-          throw new Error('Stream finished with content filter hit.');
+          logger.error('Stream finished with content filter hit.');
         case 'length':
-          throw new Error('Stream finished with token length exceeded.');
+          logger.error('Stream finished with token length exceeded.');
         case 'stop':
           logger.debug('Stream finished.');
           break;
         default:
-          throw new Error(`Stream finished with unknown reason '${finishReason}'.`);
+          logger.error(`Stream finished with unknown reason '${finishReason}'.`);
       }
     }
     yield chunk;
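
Because the stream now logs instead of throwing for non-'stop' finish reasons, consumers can iterate to completion and inspect the finish reason afterwards. A minimal consumer sketch using only the client API shown in this commit (model name and prompt taken from the sample code):

import { AzureOpenAiChatClient } from '@sap-ai-sdk/foundation-models';

// Minimal sketch: iterate the stream without wrapping the loop in try/catch,
// since non-'stop' finish reasons are now logged rather than thrown.
async function streamCapital(): Promise<void> {
  const response = await new AzureOpenAiChatClient('gpt-35-turbo').streamContent({
    messages: [{ role: 'user', content: 'What is the capital of France?' }]
  });

  for await (const chunk of response.stream) {
    process.stdout.write(chunk);
  }

  // Inspect why the stream ended instead of relying on a thrown error.
  console.log(`finish reason: ${response.finishReason}`);
  console.log(`usage: ${JSON.stringify(response.usage)}`);
}
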
3 changes: 3 additions & 0 deletions packages/foundation-models/src/azure-openai/index.ts
@@ -3,4 +3,7 @@ export * from './azure-openai-chat-client.js';
 export * from './azure-openai-embedding-client.js';
 export * from './azure-openai-chat-completion-response.js';
 export * from './azure-openai-embedding-response.js';
+export * from './azure-openai-chat-completion-stream-chunk-response.js';
+export * from './azure-openai-chat-completion-stream-response.js';
+export * from './azure-openai-chat-completion-stream.js';
 export * from './model-types.js';
5 changes: 4 additions & 1 deletion packages/foundation-models/src/index.ts
@@ -9,7 +9,10 @@ export {
   AzureOpenAiChatClient,
   AzureOpenAiEmbeddingClient,
   AzureOpenAiChatCompletionResponse,
-  AzureOpenAiEmbeddingResponse
+  AzureOpenAiEmbeddingResponse,
+  AzureOpenAiChatCompletionStreamChunkResponse,
+  AzureOpenAiChatCompletionStreamResponse,
+  ChatCompletionStream
 } from './azure-openai/index.js';
 
 export type {
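
With these root exports, the streaming types are importable directly from @sap-ai-sdk/foundation-models. As an illustration, a hypothetical helper (not part of this commit) that drains a stream response into one string, mirroring the loop the sample code below removes:

import type { AzureOpenAiChatCompletionStreamResponse } from '@sap-ai-sdk/foundation-models';

// Hypothetical helper: collect all streamed chunks into a single string.
// Assumes the chunks are strings, as in the sample code's former loop.
export async function collectStream(
  response: AzureOpenAiChatCompletionStreamResponse
): Promise<string> {
  let result = '';
  for await (const chunk of response.stream) {
    result += chunk;
  }
  return result;
}
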
18 changes: 5 additions & 13 deletions sample-code/src/foundation-models/azure-openai.ts
@@ -1,6 +1,7 @@
 import {
   AzureOpenAiChatClient,
-  AzureOpenAiEmbeddingClient
+  AzureOpenAiEmbeddingClient,
+  AzureOpenAiChatCompletionStreamResponse
 } from '@sap-ai-sdk/foundation-models';
 import { createLogger } from '@sap-cloud-sdk/util';
 import type {

@@ -32,20 +33,11 @@ export async function chatCompletion(): Promise<AzureOpenAiChatCompletionResponse> {
  * Ask Azure OpenAI model about the capital of France with streaming.
  * @returns The response from Azure OpenAI containing the response content.
  */
-export async function chatCompletionStream(): Promise<string> {
+export async function chatCompletionStream(): Promise<AzureOpenAiChatCompletionStreamResponse> {
   const response = await new AzureOpenAiChatClient('gpt-35-turbo').streamContent({
-    messages: [{ role: 'user', content: 'What is the capital of France?' }]
+    messages: [{ role: 'user', content: 'Give me a very long introduction of SAP Cloud SDK.' }]
   });
 
-  let result = '';
-  for await (const chunk of response.stream) {
-    logger.info(`chunk: ${chunk}`);
-    result += chunk;
-  }
-
-  logger.info(`finish reason: ${response.finishReason}`);
-  logger.info(`usage: ${JSON.stringify(response.usage)}`);
-  return result;
+  return response;
 }
 
 /**
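
chatCompletionStream now returns the live stream response instead of a pre-drained string, so the caller decides how to consume it. A sketch of a caller reproducing the old behavior (the relative import path is an assumption based on the sample-code layout shown above):

import { chatCompletionStream } from './foundation-models/azure-openai.js';

// Sketch: drain the stream, then log the metadata the sample used to log.
async function logStreamedCompletion(): Promise<void> {
  const response = await chatCompletionStream();

  let result = '';
  for await (const chunk of response.stream) {
    result += chunk;
  }

  console.log(`finish reason: ${response.finishReason}`);
  console.log(`usage: ${JSON.stringify(response.usage)}`);
  console.log(result);
}
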
23 changes: 21 additions & 2 deletions sample-code/src/server.ts
@@ -55,7 +55,27 @@ app.get('/azure-openai/chat-completion', async (req, res) => {
 app.get('/azure-openai/chat-completion-stream', async (req, res) => {
   try {
     const response = await chatCompletionStream();
-    res.send(response);
+
+    res.setHeader('Cache-Control', 'no-cache');
+    res.setHeader('Content-Type', 'text/event-stream');
+    res.setHeader('Access-Control-Allow-Origin', '*');
+    res.setHeader('Connection', 'keep-alive');
+    res.flushHeaders();
+
+    let connectionAlive = true;
+
+    res.on('close', () => {
+      connectionAlive = false;
+      res.end();
+    });
+
+    for await (const chunk of response.stream) {
+      if (!connectionAlive) {
+        break;
+      }
+      res.write(chunk);
+
+    }
   } catch (error: any) {
     console.error(error);
     res

@@ -64,7 +84,6 @@ app.get('/azure-openai/chat-completion-stream', async (req, res) => {
   }
 });
 
-
 app.get('/azure-openai/embedding', async (req, res) => {
   try {
     const response = await computeEmbedding();
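
On the client side, the endpoint can now be read incrementally rather than waiting for one buffered body. A consumption sketch using Node 18+'s global fetch; the host and port are illustrative assumptions about a local run of the sample server, not values from this commit:

// Sketch: consume the streaming endpoint chunk by chunk (Node 18+).
// The URL and port below are assumptions, not part of the commit.
async function consumeStream(): Promise<void> {
  const res = await fetch('http://localhost:8080/azure-openai/chat-completion-stream');
  if (!res.ok || !res.body) {
    throw new Error(`Request failed with status ${res.status}`);
  }

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) {
      break;
    }
    process.stdout.write(decoder.decode(value, { stream: true }));
  }
}

consumeStream().catch(console.error);
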
