From 66f2c9bfc1506f6fbb42aa81b68128c50efdccba Mon Sep 17 00:00:00 2001
From: Tanja Bunk
Date: Mon, 23 Oct 2023 10:28:03 +0200
Subject: [PATCH 1/2] Remove EntryPromptFlowStep class.

---
 rasa/core/policies/flow_policy.py |   5 --
 rasa/shared/core/flows/flow.py    | 107 ------------------------------
 2 files changed, 112 deletions(-)

diff --git a/rasa/core/policies/flow_policy.py b/rasa/core/policies/flow_policy.py
index 31c6ca93521c..fcd282e2233e 100644
--- a/rasa/core/policies/flow_policy.py
+++ b/rasa/core/policies/flow_policy.py
@@ -56,7 +56,6 @@
     FlowsList,
     GenerateResponseFlowStep,
     IfFlowLink,
-    EntryPromptFlowStep,
     SlotRejection,
     StepThatCanStartAFlow,
     UserMessageStep,
@@ -677,10 +676,6 @@ def run_step(
             structlogger.debug("flow.step.run.branch")
             return ContinueFlowWithNextStep()
 
-        elif isinstance(step, EntryPromptFlowStep):
-            structlogger.debug("flow.step.run.entry_prompt")
-            return ContinueFlowWithNextStep()
-
         elif isinstance(step, GenerateResponseFlowStep):
             structlogger.debug("flow.step.run.generate_response")
             generated = step.generate(tracker)
diff --git a/rasa/shared/core/flows/flow.py b/rasa/shared/core/flows/flow.py
index 023c5f621349..656309c23564 100644
--- a/rasa/shared/core/flows/flow.py
+++ b/rasa/shared/core/flows/flow.py
@@ -639,8 +639,6 @@ def step_from_json(flow_step_config: Dict[Text, Any]) -> FlowStep:
         return LinkFlowStep.from_json(flow_step_config)
     if "set_slots" in flow_step_config:
         return SetSlotsFlowStep.from_json(flow_step_config)
-    if "entry_prompt" in flow_step_config:
-        return EntryPromptFlowStep.from_json(flow_step_config)
     if "generation_prompt" in flow_step_config:
         return GenerateResponseFlowStep.from_json(flow_step_config)
     else:
@@ -1144,111 +1142,6 @@ def default_id_postfix(self) -> str:
         return "generate"
 
 
-@dataclass
-class EntryPromptFlowStep(FlowStep, StepThatCanStartAFlow):
-    """Represents the configuration of a step prompting an LLM."""
-
-    entry_prompt: Text
-    """The prompt template of the flow step."""
-    advance_if: Optional[Text]
-    """The expected response to start the flow"""
-    llm_config: Optional[Dict[Text, Any]] = None
-    """The LLM configuration of the flow step."""
-
-    @classmethod
-    def from_json(cls, flow_step_config: Dict[Text, Any]) -> EntryPromptFlowStep:
-        """Used to read flow steps from parsed YAML.
-
-        Args:
-            flow_step_config: The parsed YAML as a dictionary.
-
-        Returns:
-            The parsed flow step.
-        """
-        base = super()._from_json(flow_step_config)
-        return EntryPromptFlowStep(
-            entry_prompt=flow_step_config.get("entry_prompt", ""),
-            advance_if=flow_step_config.get("advance_if"),
-            llm_config=flow_step_config.get("llm", None),
-            **base.__dict__,
-        )
-
-    def as_json(self) -> Dict[Text, Any]:
-        """Returns the flow step as a dictionary.
-
-        Returns:
-            The flow step as a dictionary.
-        """
-        dump = super().as_json()
-        dump["entry_prompt"] = self.entry_prompt
-        if self.advance_if:
-            dump["advance_if"] = self.advance_if
-
-        if self.llm_config:
-            dump["llm"] = self.llm_config
-        return dump
-
-    def _generate_using_llm(self, prompt: str) -> Optional[str]:
-        """Use LLM to generate a response.
-
-        Args:
-            prompt: the prompt to send to the LLM
-
-        Returns:
-            generated text
-        """
-        from rasa.shared.utils.llm import llm_factory
-
-        llm = llm_factory(self.llm_config, DEFAULT_LLM_CONFIG)
-
-        try:
-            return llm(prompt)
-        except Exception as e:
-            # unfortunately, langchain does not wrap LLM exceptions which means
-            # we have to catch all exceptions here
-            structlogger.error(
-                "flow.entry_step.llm.error", error=e, step=self.id, prompt=prompt
-            )
-            return None
-
-    def is_triggered(self, tracker: DialogueStateTracker) -> bool:
-        """Returns whether the flow step is triggered by the given intent and entities.
-
-        Args:
-            intent: The intent to check.
-            entities: The entities to check.
-
-        Returns:
-            Whether the flow step is triggered by the given intent and entities.
-        """
-        from rasa.shared.utils import llm
-        from jinja2 import Template
-
-        if not self.entry_prompt:
-            return False
-
-        context = {
-            "history": llm.tracker_as_readable_transcript(tracker, max_turns=5),
-            "latest_user_message": tracker.latest_message.text
-            if tracker.latest_message
-            else "",
-        }
-        context.update(tracker.current_slot_values())
-        prompt = Template(self.entry_prompt).render(context)
-
-        generated = self._generate_using_llm(prompt)
-
-        expected_response = self.advance_if.lower() if self.advance_if else "yes"
-        if generated and generated.lower() == expected_response:
-            return True
-        else:
-            return False
-
-    def default_id_postfix(self) -> str:
-        """Returns the default id postfix of the flow step."""
-        return "entry_prompt"
-
-
 @dataclass
 class SlotRejection:
     """A slot rejection."""

From a51a35d465196fb686d82d73f3ba24252c4d6c9a Mon Sep 17 00:00:00 2001
From: Tanja Bunk
Date: Mon, 23 Oct 2023 10:30:32 +0200
Subject: [PATCH 2/2] remove entry_prompt from yaml schema

---
 rasa/shared/core/flows/flows_yaml_schema.json | 17 -----------------
 1 file changed, 17 deletions(-)

diff --git a/rasa/shared/core/flows/flows_yaml_schema.json b/rasa/shared/core/flows/flows_yaml_schema.json
index dd5b08b2a89f..3435b991310d 100644
--- a/rasa/shared/core/flows/flows_yaml_schema.json
+++ b/rasa/shared/core/flows/flows_yaml_schema.json
@@ -151,23 +151,6 @@
             "$ref": "#$defs/next"
           }
         }
-      },
-      {
-        "required": [
-          "entry_prompt"
-        ],
-        "additionalProperties": false,
-        "properties": {
-          "id": {
-            "type": "string"
-          },
-          "entry_prompt": {
-            "type": "string"
-          },
-          "next": {
-            "$ref": "#$defs/next"
-          }
-        }
       }
     ]
   }
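--
Reviewer note: the deleted EntryPromptFlowStep gated flow entry on an LLM
answer: it rendered the entry_prompt template against the conversation
context, called the LLM, and triggered only when the reply matched advance_if
(defaulting to "yes"). A minimal standalone sketch of that check, for
reference while reviewing; the function name, parameters, and bare `llm`
callable below are illustrative stand-ins, not part of the Rasa API:

    from typing import Callable, Dict, Optional
    from jinja2 import Template

    def entry_prompt_triggered(
        entry_prompt: str,
        advance_if: Optional[str],
        llm: Callable[[str], Optional[str]],
        context: Dict[str, str],
    ) -> bool:
        # No prompt configured means the step can never start a flow.
        if not entry_prompt:
            return False
        # The removed code built the context from the transcript, the
        # latest user message, and the current slot values.
        prompt = Template(entry_prompt).render(context)
        # The removed code logged LLM errors and fell back to None.
        generated = llm(prompt)
        expected = advance_if.lower() if advance_if else "yes"
        return bool(generated) and generated.lower() == expected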