Skip to content
This repository has been archived by the owner on Apr 10, 2023. It is now read-only.

Commit

Permalink
0.3.3: try to fix error
Browse files Browse the repository at this point in the history
  • Loading branch information
sudoskys committed Jan 15, 2023
1 parent fcac8f6 commit fd71077
Show file tree
Hide file tree
Showing 4 changed files with 17 additions and 17 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "openai-kira"
version = "0.3.2"
version = "0.3.3"
description = "A chat client"
authors = ["sudoskys <coldlando@hotmail.com>"]
maintainers = [
Expand Down
11 changes: 5 additions & 6 deletions src/openai_kira/Chat/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,16 +281,15 @@ async def get_chat_response(self,
_frequency_penalty, _presence_penalty, _temperature = Detect().get_tendency_arg(prompt=prompt)

# SOME HOT CAKE
api_config = {
_request_arg = {
"frequency_penalty": _frequency_penalty,
"presence_penalty": _presence_penalty,
"temperature": _temperature,
"logit_bias": {}
}
api_config = json.dumps(api_config)
api_config = json.loads(api_config)
config = {key: item for key, item in kwargs.items() if key in api_config.keys()}
api_config.update(config)
_arg_config = {key: item for key, item in kwargs.items() if key in _request_arg.keys()}
_request_arg.update(_arg_config)
_request_arg = json.loads(json.dumps(_request_arg))
# REQ
response = await Completion(api_key=self.__api_key, call_func=self.__call_func).create(
model=model,
Expand All @@ -303,7 +302,7 @@ async def get_chat_response(self,
f"{self._restart_sequence}:",
f"{self._start_sequence}:",
f"{self._restart_sequence}:"],
**api_config
**_request_arg
)
self.record_dialogue(prompt=prompt, response=response)
return response
Expand Down
19 changes: 10 additions & 9 deletions src/openai_kira/utils/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,18 +120,19 @@ def get_tendency_arg(self, prompt: str, memory: list = None, lang: str = "CN") -
if _memory_len / 20 > 0.7:
frequency_penalty = abs(frequency_penalty)

# CHECK
temperature = temperature if 0 < temperature <= 1 else 0.9

presence_penalty = presence_penalty if -2.0 < presence_penalty else -1.5
presence_penalty = presence_penalty if presence_penalty < 2.0 else 1.5

frequency_penalty = frequency_penalty if -2.0 < frequency_penalty else -1.5
frequency_penalty = frequency_penalty if frequency_penalty < 2.0 else 1.5

# FIX
temperature = round(temperature, 1)
presence_penalty = round(presence_penalty, 1)
frequency_penalty = round(frequency_penalty, 1)

# CHECK
temperature = temperature if 0 < temperature < 1 else 0.9

presence_penalty = presence_penalty if -2.0 < presence_penalty else -1
presence_penalty = presence_penalty if presence_penalty < 2.0 else 1

frequency_penalty = frequency_penalty if -2.0 < frequency_penalty else -1
frequency_penalty = frequency_penalty if frequency_penalty < 2.0 else 1
return frequency_penalty, presence_penalty, temperature


Expand Down
2 changes: 1 addition & 1 deletion test/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ async def completion():

async def chat():
response = await receiver.get_chat_response(model="text-davinci-003",
prompt="说一个关于丁真的故事",
prompt="you are ill man",
max_tokens=500,
role="你扮演",
web_enhance_server={"time": ""},
Expand Down

0 comments on commit fd71077

Please sign in to comment.