
feat: Model Discovery #75

Merged 30 commits (Aug 23, 2024)
Commits (30)
eb5a411  Integrate changes from main (MatKuhr, Aug 18, 2024)
4bb8713  Fix linting (MatKuhr, Aug 18, 2024)
c684896  Merge branch 'main' into feat/model-discovery (MatKuhr, Aug 19, 2024)
c39e009  Fix tests (MatKuhr, Aug 19, 2024)
4732b39  fix: Changes from lint (Aug 19, 2024)
4458fe4  Fix type test (MatKuhr, Aug 19, 2024)
f2fb3dc  Merge branch 'feat/model-discovery' of github.com:SAP/ai-sdk-js into … (MatKuhr, Aug 19, 2024)
98ef924  Merge branch 'main' into feat/model-discovery (MatKuhr, Aug 19, 2024)
b68ff8d  Add alternative API variant (MatKuhr, Aug 19, 2024)
a9671a1  Make models readonly (MatKuhr, Aug 19, 2024)
dd676c0  Remove obsolete class (MatKuhr, Aug 20, 2024)
41ae69e  Fix tests (MatKuhr, Aug 22, 2024)
3b2c87d  fix: Changes from lint (Aug 22, 2024)
5f6c351  add alternative proposal (marikaner, Aug 22, 2024)
8518347  Minor improvements (MatKuhr, Aug 22, 2024)
a745b8c  Refactoring from API discussion (MatKuhr, Aug 22, 2024)
ea99958  Linting (MatKuhr, Aug 22, 2024)
7c130d7  More style (MatKuhr, Aug 22, 2024)
084d633  Merge branch 'main' into feat/model-discovery (MatKuhr, Aug 22, 2024)
0d4c7df  fix: Changes from lint (Aug 22, 2024)
3a5067d  Fix type test (MatKuhr, Aug 22, 2024)
a7e7cf2  fix: Changes from lint (Aug 22, 2024)
f16cdbd  JS docs (MatKuhr, Aug 22, 2024)
e7c5fcd  fix leftover (MatKuhr, Aug 22, 2024)
f3b18fa  Merge branch 'main' into feat/model-discovery (MatKuhr, Aug 22, 2024)
6a2101c  Update packages/gen-ai-hub/src/orchestration/orchestration-client.ts (marikaner, Aug 23, 2024)
8423be8  Update packages/gen-ai-hub/src/utils/deployment-resolver.ts (marikaner, Aug 23, 2024)
672fe19  fix typescript issue (tomfrenken, Aug 23, 2024)
6a4ec01  Merge branch 'main' into feat/model-discovery (tomfrenken, Aug 23, 2024)
57579cc  update unit tests (tomfrenken, Aug 23, 2024)
2 changes: 1 addition & 1 deletion eslint.config.js
@@ -8,7 +8,7 @@ export default [
     rules: { 'import/namespace': 'off'}
   },
   {
-    ignores: ['**/dist/**/*', '**/coverage/**/*', 'packages/ai-core/src/client/**/*'],
+    ignores: ['**/dist*/**/*', '**/coverage/**/*', 'packages/ai-core/src/client/**/*'],
   },
   {
     files: ['**/test-util/**/*.ts', '**/packages/gen-ai-hub/src/orchestration/client/**/*'],
8 changes: 4 additions & 4 deletions packages/core/src/context.ts
@@ -1,6 +1,6 @@
 import { createLogger } from '@sap-cloud-sdk/util';
 import {
-  Destination,
+  HttpDestination,
   Service,
   ServiceCredentials,
   getServiceBinding,
@@ -18,7 +18,7 @@ let aiCoreServiceBinding: Service | undefined;
  * Returns a destination object from AI Core service binding.
  * @returns The destination object.
  */
-export async function getAiCoreDestination(): Promise<Destination> {
+export async function getAiCoreDestination(): Promise<HttpDestination> {
   if (!aiCoreServiceBinding) {
     aiCoreServiceBinding =
       getAiCoreServiceKeyFromEnv() || getServiceBinding('aicore');
@@ -29,12 +29,12 @@ export async function getAiCoreDestination(): Promise<Destination> {
     }
   }

-  const aiCoreDestination = await transformServiceBindingToDestination(
+  const aiCoreDestination = (await transformServiceBindingToDestination(
     aiCoreServiceBinding,
     {
       useCache: true
     }
-  );
+  )) as HttpDestination;
   return aiCoreDestination;
 }

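For illustration, the narrowed return type gives callers an HTTP-capable destination (with url) without casting on their side. A minimal usage sketch, assuming getAiCoreDestination is exported from @sap-ai-sdk/core and an AI Core binding or AICORE_SERVICE_KEY is present in the environment:

// Sketch only, not part of this PR.
import { getAiCoreDestination } from '@sap-ai-sdk/core';

async function logAiCoreUrl(): Promise<void> {
  // Resolves the 'aicore' service binding (or the AICORE_SERVICE_KEY env variable) to a destination.
  const destination = await getAiCoreDestination();
  // url is part of HttpDestination, so no extra cast is needed by the caller.
  console.log(`AI Core base URL: ${destination.url}`);
}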
8 changes: 3 additions & 5 deletions packages/core/src/http-client.ts
@@ -54,19 +54,17 @@ export interface EndpointOptions {
  * @param requestConfig - The request configuration.
  * @returns The {@link HttpResponse} from the AI Core service.
  */
-export async function executeRequest<Data extends BaseLlmParameters>(
+export async function executeRequest(
   endpointOptions: EndpointOptions,
-  data: Data,
+  data: any,
   requestConfig?: CustomRequestConfig
 ): Promise<HttpResponse> {
   const aiCoreDestination = await getAiCoreDestination();
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  const { deploymentConfiguration, ...body } = data;
   const { url, apiVersion } = endpointOptions;

   const mergedRequestConfig = {
     ...mergeWithDefaultRequestConfig(apiVersion, requestConfig),
-    data: JSON.stringify(body)
+    data: JSON.stringify(data)
   };

   const targetUrl = aiCoreDestination.url + `/v2/${removeLeadingSlashes(url)}`;
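With deploymentConfiguration no longer stripped from the payload, the deployment is addressed purely through the URL and the body is serialized as-is. A minimal call sketch, assuming executeRequest is exported from @sap-ai-sdk/core; the deployment id, API version and prompt are placeholder values:

// Sketch only, not part of this PR.
import { executeRequest } from '@sap-ai-sdk/core';

async function callChatCompletion(): Promise<void> {
  const response = await executeRequest(
    // The deployment id now lives in the endpoint, not in the request body.
    {
      url: 'inference/deployments/1234/chat/completions',
      apiVersion: '2024-02-01'
    },
    // Sent as-is: no deploymentConfiguration key is destructured away anymore.
    { messages: [{ role: 'user', content: 'Hello' }] }
  );
  console.log(response.status);
}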
10 changes: 3 additions & 7 deletions packages/core/src/openapi-request-builder.ts
@@ -27,13 +27,9 @@ export class OpenApiRequestBuilder<
   async executeRaw(): Promise<HttpResponse> {
     const { url, data, ...rest } = await this.requestConfig();
     // TODO: Remove explicit url! once we updated the type in the Cloud SDK, since url is always defined.
-    return executeRequest(
-      { url: url! },
-      { deploymentConfiguration: {}, ...data },
-      {
-        ...rest
-      }
-    );
+    return executeRequest({ url: url! }, data, {
+      ...rest
+    });
   }

   /**
1 change: 1 addition & 0 deletions packages/gen-ai-hub/package.json
@@ -29,6 +29,7 @@
   },
   "dependencies": {
     "@sap-ai-sdk/core": "workspace:^",
+    "@sap-ai-sdk/ai-core": "workspace:^",
     "@sap-cloud-sdk/http-client": "^3.18.1",
     "@sap-cloud-sdk/connectivity": "^3.18.1",
     "@sap-cloud-sdk/util": "^3.18.1",
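The new @sap-ai-sdk/ai-core dependency is what model discovery builds on: gen-ai-hub can ask AI Core for running deployments and pick the one serving a requested model (compare the deployment-resolver.ts commit above). A hypothetical sketch of such a lookup; DeploymentApi, deploymentQuery, execute and the response field names are assumptions about the generated client, not a verified API:

// Hypothetical sketch: names and response shape are assumed, not taken from this PR.
import { DeploymentApi } from '@sap-ai-sdk/ai-core';

async function resolveDeploymentId(modelName: string): Promise<string | undefined> {
  // Query running deployments in the foundation-models scenario of the default resource group.
  const { resources } = await DeploymentApi.deploymentQuery(
    { scenarioId: 'foundation-models', status: 'RUNNING' },
    { 'AI-Resource-Group': 'default' }
  ).execute();

  // Return the first deployment whose backend model matches the requested name.
  return resources.find(
    deployment =>
      deployment.details?.resources?.backend_details?.model?.name === modelName
  )?.id;
}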
28 changes: 0 additions & 28 deletions packages/gen-ai-hub/src/client/interface.ts

This file was deleted.

58 changes: 26 additions & 32 deletions packages/gen-ai-hub/src/client/openai/openai-client.test.ts
@@ -1,29 +1,24 @@
 import nock from 'nock';
-import { BaseLlmParametersWithDeploymentId } from '@sap-ai-sdk/core';
 import {
   mockClientCredentialsGrantCall,
   mockInference,
   parseMockResponse
 } from '../../../../../test-util/mock-http.js';
 import {
   OpenAiChatCompletionOutput,
-  OpenAiChatCompletionParameters,
   OpenAiChatMessage,
   OpenAiEmbeddingOutput,
   OpenAiEmbeddingParameters
 } from './openai-types.js';
 import { OpenAiClient } from './openai-client.js';

 describe('openai client', () => {
-  const deploymentConfiguration: BaseLlmParametersWithDeploymentId = {
-    deploymentId: 'deployment-id'
-  };
   const chatCompletionEndpoint = {
-    url: `inference/deployments/${deploymentConfiguration.deploymentId}/chat/completions`,
+    url: 'inference/deployments/1234/chat/completions',
     apiVersion: '2024-02-01'
   };
   const embeddingsEndpoint = {
-    url: `inference/deployments/${deploymentConfiguration.deploymentId}/embeddings`,
+    url: 'inference/deployments/1234/embeddings',
     apiVersion: '2024-02-01'
   };

@@ -47,18 +42,15 @@ describe('openai client', () => {
         }
       ] as OpenAiChatMessage[]
     };
-      const request: OpenAiChatCompletionParameters = {
-        ...prompt,
-        deploymentConfiguration
-      };

      const mockResponse = parseMockResponse<OpenAiChatCompletionOutput>(
        'openai',
        'openai-chat-completion-success-response.json'
      );

      mockInference(
        {
-          data: request
+          data: prompt
        },
        {
          data: mockResponse,
@@ -67,24 +59,24 @@ describe('openai client', () => {
        chatCompletionEndpoint
      );

-      const response = await client.chatCompletion(request);
+      const response = await client.chatCompletion(
+        'gpt-35-turbo',
+        prompt,
+        '1234'
+      );
      expect(response).toEqual(mockResponse);
    });

    it('throws on bad request', async () => {
      const prompt = { messages: [] };
-      const request: OpenAiChatCompletionParameters = {
-        ...prompt,
-        deploymentConfiguration
-      };
      const mockResponse = parseMockResponse(
        'openai',
        'openai-error-response.json'
      );

      mockInference(
        {
-          data: request
+          data: prompt
        },
        {
          data: mockResponse,
@@ -93,50 +85,50 @@ describe('openai client', () => {
        chatCompletionEndpoint
      );

-      expect(client.chatCompletion(request)).rejects.toThrow();
+      await expect(
+        client.chatCompletion('gpt-4', prompt, '1234')
+      ).rejects.toThrow('status code 400');
    });
  });

  describe('embeddings', () => {
    it('parses a successful response', async () => {
-      const prompt = { input: ['AI is fascinating'] };
-      const request: OpenAiEmbeddingParameters = {
-        ...prompt,
-        deploymentConfiguration
-      };
+      const prompt = {
+        input: ['AI is fascinating']
+      } as OpenAiEmbeddingParameters;
      const mockResponse = parseMockResponse<OpenAiEmbeddingOutput>(
        'openai',
        'openai-embeddings-success-response.json'
      );

      mockInference(
        {
-          data: request
+          data: prompt
        },
        {
          data: mockResponse,
          status: 200
        },
        embeddingsEndpoint
      );
-      const response = await client.embeddings(request);
+      const response = await client.embeddings(
+        'text-embedding-ada-002',
+        prompt,
+        '1234'
+      );
      expect(response).toEqual(mockResponse);
    });

    it('throws on bad request', async () => {
      const prompt = { input: [] };
-      const request: OpenAiEmbeddingParameters = {
-        ...prompt,
-        deploymentConfiguration
-      };
      const mockResponse = parseMockResponse(
        'openai',
        'openai-error-response.json'
      );

      mockInference(
        {
-          data: request
+          data: prompt
        },
        {
          data: mockResponse,
@@ -145,7 +137,9 @@ describe('openai client', () => {
        embeddingsEndpoint
      );

-      expect(client.embeddings(request)).rejects.toThrow();
+      await expect(
+        client.embeddings('text-embedding-3-large', prompt, '1234')
+      ).rejects.toThrow('status code 400');
    });
  });
 });
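Taken together, the updated tests show the new client surface: the request body stays a plain OpenAI payload, while the model name and deployment id are passed as separate arguments. A usage sketch mirroring the calls above, assuming OpenAiClient is exported from the @sap-ai-sdk/gen-ai-hub package (import path assumed) and that a deployment with id 1234 exists:

// Sketch mirroring the updated tests; package name and deployment id are placeholders.
import { OpenAiClient } from '@sap-ai-sdk/gen-ai-hub';

async function run(): Promise<void> {
  const client = new OpenAiClient();

  // Chat completion: model name, OpenAI payload, deployment id.
  const completion = await client.chatCompletion(
    'gpt-35-turbo',
    { messages: [{ role: 'user', content: 'Where is the deepest place on earth located' }] },
    '1234'
  );

  // Embeddings follow the same three-argument shape.
  const embeddings = await client.embeddings(
    'text-embedding-ada-002',
    { input: ['AI is fascinating'] },
    '1234'
  );

  console.log(completion, embeddings);
}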