From 7f1901295ed8182d9d7157200620ee3e7f1765d5 Mon Sep 17 00:00:00 2001 From: Daggx Date: Wed, 8 Nov 2023 15:14:45 +0100 Subject: [PATCH] [fix] add mistral as new provider + new models --- .../api_keys/mistral_settings_template.json | 6 ++ edenai_apis/apis/__init__.py | 1 + edenai_apis/apis/mistral/__init__.py | 1 + edenai_apis/apis/mistral/errors.py | 17 ++++ edenai_apis/apis/mistral/info.json | 11 +++ edenai_apis/apis/mistral/mistral_api.py | 87 +++++++++++++++++++ .../outputs/text/generation_output.json | 13 +++ 7 files changed, 136 insertions(+) create mode 100644 edenai_apis/api_keys/mistral_settings_template.json create mode 100644 edenai_apis/apis/mistral/__init__.py create mode 100644 edenai_apis/apis/mistral/errors.py create mode 100644 edenai_apis/apis/mistral/info.json create mode 100644 edenai_apis/apis/mistral/mistral_api.py create mode 100644 edenai_apis/apis/mistral/outputs/text/generation_output.json diff --git a/edenai_apis/api_keys/mistral_settings_template.json b/edenai_apis/api_keys/mistral_settings_template.json new file mode 100644 index 00000000..40ba1171 --- /dev/null +++ b/edenai_apis/api_keys/mistral_settings_template.json @@ -0,0 +1,6 @@ +{ + "user_id": "", + "app_id": "", + "key": "" + } + \ No newline at end of file diff --git a/edenai_apis/apis/__init__.py b/edenai_apis/apis/__init__.py index 7f936465..07fd142f 100644 --- a/edenai_apis/apis/__init__.py +++ b/edenai_apis/apis/__init__.py @@ -57,5 +57,6 @@ from .vernai import VernaiApi from .readyredact import ReadyRedactApi from .senseloaf import SenseloafApi +from .mistral import MistralApi # THIS NEEDS TO BE DONE AUTOMATICALLY diff --git a/edenai_apis/apis/mistral/__init__.py b/edenai_apis/apis/mistral/__init__.py new file mode 100644 index 00000000..587fdf7b --- /dev/null +++ b/edenai_apis/apis/mistral/__init__.py @@ -0,0 +1 @@ +from .mistral_api import MistralApi diff --git a/edenai_apis/apis/mistral/errors.py b/edenai_apis/apis/mistral/errors.py new file mode 100644 index 00000000..1661d37a --- /dev/null +++ b/edenai_apis/apis/mistral/errors.py @@ -0,0 +1,17 @@ +from edenai_apis.utils.exception import ( + ProviderErrorLists, + ProviderInternalServerError, + ProviderTimeoutError, +) + +# NOTE: error messages should be regex patterns +ERRORS: ProviderErrorLists = { + ProviderInternalServerError: [ + r"Error calling Clarifai", + r"Failure", + ], + ProviderTimeoutError: [ + r"<[^<>]+debug_error_string = 'UNKNOWN:Error received from peer ipv4:\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+ {created_time:'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+[\+\-]\d{2}:\d{2}', grpc_status:14, grpc_message:'GOAWAY received'}'>", + r"Model is deploying" + ] + } diff --git a/edenai_apis/apis/mistral/info.json b/edenai_apis/apis/mistral/info.json new file mode 100644 index 00000000..a8f25282 --- /dev/null +++ b/edenai_apis/apis/mistral/info.json @@ -0,0 +1,11 @@ +{ + "text": { + "generation": { + "constraints": { + "models":["mistral-7B-Instruct", "mistral-7B-OpenOrca","openHermes-2-mistral-7B"], + "default_model": "mistral-7B-Instruct" + }, + "version": "v1" + } + } +} \ No newline at end of file diff --git a/edenai_apis/apis/mistral/mistral_api.py b/edenai_apis/apis/mistral/mistral_api.py new file mode 100644 index 00000000..1d0242a7 --- /dev/null +++ b/edenai_apis/apis/mistral/mistral_api.py @@ -0,0 +1,87 @@ +from typing import Dict +from google.protobuf.json_format import MessageToDict +from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc +from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel 
+from clarifai_grpc.grpc.api.status import status_code_pb2 + +from edenai_apis.features import ProviderInterface, TextInterface +from edenai_apis.features.text.generation.generation_dataclass import ( + GenerationDataClass, +) +from edenai_apis.loaders.data_loader import ProviderDataEnum +from edenai_apis.loaders.loaders import load_provider +from edenai_apis.utils.exception import ProviderException +from edenai_apis.utils.types import ResponseType + + + +class MistralApi(ProviderInterface, TextInterface): + provider_name = "mistral" + + def __init__(self, api_keys: Dict = {}) -> None: + self.api_settings = load_provider( + ProviderDataEnum.KEY, self.provider_name, api_keys=api_keys + ) + self.user_id = self.api_settings["user_id"] + self.app_id = self.api_settings["app_id"] + self.key = self.api_settings["key"] + + def __chat_markup_tokens(self, model): + if model == "mistral-7B-Instruct": + return "[INST]", "[/INST]" + else: + return "<|im_start|>", "<|im_end|>" + + def text__generation( + self, text: str, temperature: float, max_tokens: int, model: str + ) -> ResponseType[GenerationDataClass]: + start, end = self.__chat_markup_tokens(model) + + text = f"{start} {text} {end}" + + channel = ClarifaiChannel.get_grpc_channel() + stub = service_pb2_grpc.V2Stub(channel) + + metadata = (("authorization", self.key),) + user_data_object = resources_pb2.UserAppIDSet( + user_id="mistralai", app_id="completion" + ) + + post_model_outputs_response = stub.PostModelOutputs( + service_pb2.PostModelOutputsRequest( + user_app_id=user_data_object, + model_id=model, + inputs=[ + resources_pb2.Input( + data=resources_pb2.Data(text=resources_pb2.Text(raw=text)) + ) + ], + ), + metadata=metadata, + ) + + if post_model_outputs_response.status.code != status_code_pb2.SUCCESS: + raise ProviderException( + post_model_outputs_response.status.description, + code=post_model_outputs_response.status.code, + ) + + response = MessageToDict( + post_model_outputs_response, preserving_proto_field_name=True + ) + + output = response.get("outputs", []) + if len(output) == 0: + raise ProviderException( + "Mistral returned an empty response!", + code=post_model_outputs_response.status.code, + ) + + original_response = output[0].get("data", {}) or {} + + return ResponseType[GenerationDataClass]( + original_response=original_response, + standardized_response=GenerationDataClass( + generated_text=(original_response.get("text", {}) or {}).get("raw", "") + ), + ) diff --git a/edenai_apis/apis/mistral/outputs/text/generation_output.json b/edenai_apis/apis/mistral/outputs/text/generation_output.json new file mode 100644 index 00000000..17939e3c --- /dev/null +++ b/edenai_apis/apis/mistral/outputs/text/generation_output.json @@ -0,0 +1,13 @@ +{ + "original_response": { + "text": { + "raw": "party\n\nAI assistant: Hi there! I'm an AI language model, designed to assist and engage in conversations. My name is not \"who are you?\" but I'm here to help you with any questions or tasks you may have. How can I assist you today?", + "text_info": { + "encoding": "UnknownTextEnc" + } + } + }, + "standardized_response": { + "generated_text": "party\n\nAI assistant: Hi there! I'm an AI language model, designed to assist and engage in conversations. My name is not \"who are you?\" but I'm here to help you with any questions or tasks you may have. How can I assist you today?" + } +} \ No newline at end of file
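
For reviewers, a minimal usage sketch of the MistralApi provider added above. The credential values are placeholders, temperature and max_tokens appear only because the generation interface requires them (this first version does not forward them to Clarifai), and the direct call bypasses the usual EdenAI dispatch layer, so treat this as an illustration rather than the canonical invocation.

from edenai_apis.apis.mistral import MistralApi

# Placeholder credentials; real values belong in
# edenai_apis/api_keys/mistral_settings.json (see the template added above).
mistral = MistralApi(
    api_keys={
        "user_id": "<clarifai-user-id>",
        "app_id": "<clarifai-app-id>",
        "key": "<clarifai-authorization-value>",
    }
)

response = mistral.text__generation(
    text="Who are you?",
    temperature=0.7,   # accepted by the interface but not forwarded to Clarifai here
    max_tokens=256,    # likewise accepted but unused in this first version
    model="mistral-7B-Instruct",
)

print(response.standardized_response.generated_text)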
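
The ERRORS table in errors.py is meant to be consumed by the shared edenai_apis error-handling utilities; the sketch below is only an assumption of how such a regex lookup could work, not the project's actual implementation, and the helper name raise_mapped_error is hypothetical.

import re

from edenai_apis.apis.mistral.errors import ERRORS
from edenai_apis.utils.exception import ProviderException


def raise_mapped_error(message: str, code: int) -> None:
    # Hypothetical helper: re-raise a raw provider message as the most
    # specific exception whose regex pattern matches, falling back to a
    # plain ProviderException when nothing matches.
    for exception_type, patterns in ERRORS.items():
        if any(re.search(pattern, message) for pattern in patterns):
            raise exception_type(message, code=code)
    raise ProviderException(message, code=code)


# Example: a transient "Model is deploying" message from Clarifai would be
# surfaced as a ProviderTimeoutError by the patterns defined in errors.py.
# raise_mapped_error("Model is deploying", code=503)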