removed response queue
kirgrim committed Mar 9, 2024
1 parent 5fba3e7 · commit 05f4506
Showing 2 changed files with 2 additions and 3 deletions.
neon_llm_core/chatbot.py (2 changes: 1 addition & 1 deletion)
@@ -37,7 +37,7 @@ class LLMBot(ChatBot):
     def __init__(self, *args, **kwargs):
         ChatBot.__init__(self, *args, **kwargs)
         self.bot_type = "submind"
-        self.base_llm = kwargs.get("llm_name")  # chat_gpt, fastchat, etc.
+        self.base_llm = kwargs.get("llm_name")  # chatgpt, fastchat, etc.
         self.persona = kwargs.get("persona")
         self.mq_queue_config = self.get_llm_mq_config(self.base_llm)
         LOG.info(f'Initialised config for llm={self.base_llm}|'
neon_llm_core/utils/personas/provider.py (3 changes: 1 addition & 2 deletions)
@@ -77,8 +77,7 @@ def _fetch_persona_config(self):
         queue = "get_configured_personas"
         response = send_mq_request(vhost=LLM_VHOST,
                                    request_data={"service_name": self.service_name},
-                                   target_queue=queue,
-                                   response_queue=f'{queue}.{self.service_name}.response')
+                                   target_queue=queue)
         self.personas = response.get('items', [])
         for persona in self.personas:
             if persona:
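Net effect: the persona provider no longer names a dedicated reply queue for this request, dropping the per-service '{queue}.{service_name}.response' naming scheme. Presumably send_mq_request() from neon_mq_connector handles reply routing itself when response_queue is omitted. Below is a minimal standalone sketch of the call as it stands after this commit; the LLM_VHOST value and the fetch_configured_personas() wrapper are illustrative assumptions, not part of the diff:

# Sketch only: assumes send_mq_request() creates and consumes a temporary
# reply queue internally when no response_queue argument is passed, which
# is what this commit relies on.
from neon_mq_connector.utils.client_utils import send_mq_request

LLM_VHOST = "/llm"  # assumed vhost value for illustration

def fetch_configured_personas(service_name: str) -> list:
    """Illustrative wrapper mirroring _fetch_persona_config after this commit."""
    queue = "get_configured_personas"
    # No explicit response_queue any more; reply routing is left to the connector.
    response = send_mq_request(vhost=LLM_VHOST,
                               request_data={"service_name": service_name},
                               target_queue=queue)
    return response.get("items", [])

One practical upside of this simplification: callers and the service no longer need to agree on a response-queue naming convention.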
