feat: optimize message return (langgenius#822)
takatost authored Aug 13, 2023
1 parent 8fa12f6 commit b1517fd
Showing 1 changed file with 6 additions and 3 deletions.
9 changes: 6 additions & 3 deletions api/core/model_providers/models/llm/base.py
@@ -218,15 +218,18 @@ def support_streaming(cls):

     def _get_prompt_from_messages(self, messages: List[PromptMessage],
                                   model_mode: Optional[ModelMode] = None) -> Union[str | List[BaseMessage]]:
-        if len(messages) == 0:
-            raise ValueError("prompt must not be empty.")
-
         if not model_mode:
             model_mode = self.model_mode

         if model_mode == ModelMode.COMPLETION:
+            if len(messages) == 0:
+                return ''
+
             return messages[0].content
         else:
+            if len(messages) == 0:
+                return []
+
             chat_messages = []
             for message in messages:
                 if message.type == MessageType.HUMAN:
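
In short, an empty `messages` list no longer raises ValueError("prompt must not be empty."); `_get_prompt_from_messages` now returns an empty prompt of the type matching the mode: '' for completion models, [] for chat models. Below is a minimal standalone sketch of that behaviour, not the repository's code; it uses plain strings in place of PromptMessage/BaseMessage and a simplified two-member ModelMode enum, purely to illustrate the new return values.

# Standalone sketch (hypothetical names) of the empty-prompt handling
# introduced by this commit: empty input yields an empty prompt instead
# of raising ValueError.
from enum import Enum
from typing import List, Union


class ModelMode(Enum):
    COMPLETION = 'completion'
    CHAT = 'chat'


def get_prompt_from_messages(messages: List[str],
                             model_mode: ModelMode) -> Union[str, List[str]]:
    if model_mode == ModelMode.COMPLETION:
        # Completion models expect a plain string prompt.
        if len(messages) == 0:
            return ''
        return messages[0]
    else:
        # Chat models expect a list of messages.
        if len(messages) == 0:
            return []
        return list(messages)


# Before this commit, an empty list raised ValueError("prompt must not be empty.")
assert get_prompt_from_messages([], ModelMode.COMPLETION) == ''
assert get_prompt_from_messages([], ModelMode.CHAT) == []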
