Add OpenAI client access OPEA microservice UT cases (#653)
* Add OpenAI access microservice UT cases
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* add the absolute path
* refactor code
* add openai dependency
* install openai in scripts
* fix ci issue

Signed-off-by: lvliang-intel <liang1.lv@intel.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent a672569, commit 1b69897. Showing 3 changed files with 70 additions and 3 deletions.
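The commit message notes that the openai package is installed in the test scripts before the new validator runs. A minimal sketch of what that wiring could look like, assuming pip is available and the LLM microservice listens on port 9000 (both are illustrative assumptions, not details from this commit):

# Hedged sketch: install the client library, then probe the LLM microservice.
# The 9000 port is an assumed value, not taken from this commit.
pip install openai
ip_address=$(hostname -I | awk '{print $1}')
python3 validate_svc_with_openai.py "$ip_address" 9000 llm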
The new file (42 added lines) is the validation script that exercises an OPEA microservice through the OpenAI Python client:

@@ -0,0 +1,42 @@
#!/usr/bin/env python3
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import os
import sys

import openai


def validate_svc(ip_address, service_port, service_type):
    # The microservice exposes an OpenAI-compatible API, so any key works;
    # fall back to a placeholder when OPENAI_API_KEY is not set.
    openai.api_key = os.getenv("OPENAI_API_KEY", "empty")

    endpoint = f"http://{ip_address}:{service_port}"
    client = openai.OpenAI(
        api_key=openai.api_key,
        base_url=endpoint + "/v1",
    )

    if service_type == "llm":
        response = client.chat.completions.create(
            model="tgi",
            messages=[{"role": "user", "content": "What is Deep Learning?"}],
            max_tokens=128,
        )
    elif service_type == "embedding":
        response = client.embeddings.create(model="tei", input="What is Deep Learning?")
    else:
        print(f"Unknown service type: {service_type}")
        exit(1)

    # LLM responses must mention the expected phrase; embeddings only need
    # to come back as a non-empty vector.
    result = response.choices[0].message.content.strip() if service_type == "llm" else response.data[0].embedding
    if ("Deep Learning is" in result) if service_type == "llm" else result:
        print("Result correct.")
    else:
        print(f"Result wrong. Received was {result}")
        exit(1)


if __name__ == "__main__":
    if len(sys.argv) != 4:
        print("Usage: python3 validate_svc_with_openai.py <ip_address> <service_port> <service_type>")
        exit(1)
    ip_address = sys.argv[1]
    service_port = sys.argv[2]
    service_type = sys.argv[3]
    validate_svc(ip_address, service_port, service_type)
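Because validate_svc exits with status 1 on any failure, a driving test script only needs to check the exit code. A hedged sketch of that pattern (the 6000 port and the embedding target are illustrative assumptions, not values from this commit):

# Hedged sketch: gate a CI step on the validator's exit status.
# The 6000 port is an assumed value, not taken from this commit.
ip_address=$(hostname -I | awk '{print $1}')
if python3 validate_svc_with_openai.py "$ip_address" 6000 embedding; then
    echo "embedding microservice validated"
else
    echo "embedding microservice validation failed"
    exit 1
fi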