[ENG-594] Remove entry prompt #12934

Merged 2 commits on Oct 23, 2023
5 changes: 0 additions & 5 deletions rasa/core/policies/flow_policy.py
@@ -56,7 +56,6 @@
     FlowsList,
     GenerateResponseFlowStep,
     IfFlowLink,
-    EntryPromptFlowStep,
     SlotRejection,
     StepThatCanStartAFlow,
     UserMessageStep,
@@ -677,10 +676,6 @@ def run_step(
structlogger.debug("flow.step.run.branch")
return ContinueFlowWithNextStep()

elif isinstance(step, EntryPromptFlowStep):
structlogger.debug("flow.step.run.entry_prompt")
return ContinueFlowWithNextStep()

elif isinstance(step, GenerateResponseFlowStep):
structlogger.debug("flow.step.run.generate_response")
generated = step.generate(tracker)
107 changes: 0 additions & 107 deletions rasa/shared/core/flows/flow.py
@@ -639,8 +639,6 @@ def step_from_json(flow_step_config: Dict[Text, Any]) -> FlowStep:
         return LinkFlowStep.from_json(flow_step_config)
     if "set_slots" in flow_step_config:
         return SetSlotsFlowStep.from_json(flow_step_config)
-    if "entry_prompt" in flow_step_config:
-        return EntryPromptFlowStep.from_json(flow_step_config)
     if "generation_prompt" in flow_step_config:
         return GenerateResponseFlowStep.from_json(flow_step_config)
     else:
@@ -1144,111 +1142,6 @@ def default_id_postfix(self) -> str:
return "generate"


@dataclass
class EntryPromptFlowStep(FlowStep, StepThatCanStartAFlow):
"""Represents the configuration of a step prompting an LLM."""

entry_prompt: Text
"""The prompt template of the flow step."""
advance_if: Optional[Text]
"""The expected response to start the flow"""
llm_config: Optional[Dict[Text, Any]] = None
"""The LLM configuration of the flow step."""

@classmethod
def from_json(cls, flow_step_config: Dict[Text, Any]) -> EntryPromptFlowStep:
"""Used to read flow steps from parsed YAML.

Args:
flow_step_config: The parsed YAML as a dictionary.

Returns:
The parsed flow step.
"""
base = super()._from_json(flow_step_config)
return EntryPromptFlowStep(
entry_prompt=flow_step_config.get("entry_prompt", ""),
advance_if=flow_step_config.get("advance_if"),
llm_config=flow_step_config.get("llm", None),
**base.__dict__,
)

def as_json(self) -> Dict[Text, Any]:
"""Returns the flow step as a dictionary.

Returns:
The flow step as a dictionary.
"""
dump = super().as_json()
dump["entry_prompt"] = self.entry_prompt
if self.advance_if:
dump["advance_if"] = self.advance_if

if self.llm_config:
dump["llm"] = self.llm_config
return dump

def _generate_using_llm(self, prompt: str) -> Optional[str]:
"""Use LLM to generate a response.

Args:
prompt: the prompt to send to the LLM

Returns:
generated text
"""
from rasa.shared.utils.llm import llm_factory

llm = llm_factory(self.llm_config, DEFAULT_LLM_CONFIG)

try:
return llm(prompt)
except Exception as e:
# unfortunately, langchain does not wrap LLM exceptions which means
# we have to catch all exceptions here
structlogger.error(
"flow.entry_step.llm.error", error=e, step=self.id, prompt=prompt
)
return None

def is_triggered(self, tracker: DialogueStateTracker) -> bool:
"""Returns whether the flow step is triggered by the given intent and entities.

Args:
intent: The intent to check.
entities: The entities to check.

Returns:
Whether the flow step is triggered by the given intent and entities.
"""
from rasa.shared.utils import llm
from jinja2 import Template

if not self.entry_prompt:
return False

context = {
"history": llm.tracker_as_readable_transcript(tracker, max_turns=5),
"latest_user_message": tracker.latest_message.text
if tracker.latest_message
else "",
}
context.update(tracker.current_slot_values())
prompt = Template(self.entry_prompt).render(context)

generated = self._generate_using_llm(prompt)

expected_response = self.advance_if.lower() if self.advance_if else "yes"
if generated and generated.lower() == expected_response:
return True
else:
return False

def default_id_postfix(self) -> str:
"""Returns the default id postfix of the flow step."""
return "entry_prompt"


@dataclass
class SlotRejection:
"""A slot rejection."""
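For readers skimming the removed class: the heart of it was `is_triggered`, which rendered the `entry_prompt` Jinja2 template against the conversation and started the flow only when the LLM's reply matched `advance_if` (defaulting to "yes"). Below is a minimal, self-contained sketch of that check, with rasa's tracker and `llm_factory` plumbing replaced by plain arguments and a stubbed LLM callable; the prompt text and transcript are invented for illustration.

```python
from typing import Callable, Optional

from jinja2 import Template


def would_trigger(
    entry_prompt: str,
    history: str,
    latest_user_message: str,
    advance_if: Optional[str],
    llm: Callable[[str], Optional[str]],
) -> bool:
    """Mirror of the removed is_triggered, stripped of tracker plumbing."""
    if not entry_prompt:
        return False
    # Render the prompt template against the conversation context, as the
    # removed code did with the tracker transcript and slot values.
    prompt = Template(entry_prompt).render(
        history=history, latest_user_message=latest_user_message
    )
    generated = llm(prompt)  # the real code wrapped this call in try/except
    expected = advance_if.lower() if advance_if else "yes"
    return bool(generated and generated.lower() == expected)


def always_yes(prompt: str) -> str:
    """Stub LLM that always agrees, just to show the control flow end to end."""
    return "yes"


print(
    would_trigger(
        entry_prompt=(
            "Conversation so far:\n{{ history }}\n"
            "Does the user want to transfer money? Answer yes or no."
        ),
        history="USER: send 50 EUR to Alice",
        latest_user_message="send 50 EUR to Alice",
        advance_if=None,
        llm=always_yes,
    )
)  # -> True
```

With the stub, the check trivially returns True; with a real model, every flow start cost an LLM round trip, which is where the removed error handling in `_generate_using_llm` mattered.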
17 changes: 0 additions & 17 deletions rasa/shared/core/flows/flows_yaml_schema.json
@@ -151,23 +151,6 @@
"$ref": "#$defs/next"
}
}
},
{
"required": [
"entry_prompt"
],
"additionalProperties": false,
"properties": {
"id": {
"type": "string"
},
"entry_prompt": {
"type": "string"
},
"next": {
"$ref": "#$defs/next"
}
}
}
]
}
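For context, a hedged sketch of how an `entry_prompt` step could have been declared in a flows file before this PR, based on the keys in the removed schema alternative (`id`, `entry_prompt`, `next`); the flow name and prompt wording are invented. Note that the removed dataclass also read `advance_if` and `llm` keys that the schema alternative shown above (with `"additionalProperties": false`) did not list.

```yaml
flows:
  transfer_money:          # flow name invented for illustration
    description: Example flow guarded by an entry prompt.
    steps:
      - id: entry_check
        entry_prompt: |
          Conversation so far:
          {{ history }}
          Does the user want to transfer money? Answer yes or no.
```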