|
from typing import Dict, Any |
|
|
|
from aiflows.base_flows import CompositeFlow |
|
from aiflows.utils import logging |
|
|
|
from .ControllerAtomicFlow import ControllerAtomicFlow |
|
from aiflows.messages import FlowMessage |
|
from aiflows.interfaces import KeyInterface |
|
logging.set_verbosity_debug() |
|
log = logging.get_logger(__name__) |
|
|
|
|
|
class ControllerExecutorFlow(CompositeFlow): |
|
""" This class implements a ControllerExecutorFlow. It's composed of a ControllerAtomicFlow and an ExecutorFlow. |
|
Where typically the ControllerAtomicFlow is uses a LLM to decide which command to call and the ExecutorFlow (branching flow) is used to execute the command. |
|
|
|
It contains the following subflows: |
|
|
|
    - A Controller Atomic Flow: A flow that decides which command to call next in order to get closer to accomplishing the given goal.

    - An Executor Flow: A branching flow that executes the command chosen by the ControllerAtomicFlow.
|
|
|
An illustration of the flow is as follows: |
|
|
|
    goal -----|-----> ControllerFlow----->|-----> (answer,status)
|
^ | |
|
| | |
|
| v |
|
|<----- ExecutorFlow <------| |
|
|
|
*Configuration Parameters*: |
|
|
|
- `name` (str): The name of the flow. Default: "CtrlEx" |
|
- `description` (str): A description of the flow. This description is used to generate the help message of the flow. |
|
Default: "ControllerExecutor (i.e., MRKL, ReAct) interaction implementation with Flows |
|
that approaches the problem solving in two phases: one Flow chooses the next step and another Flow executes it. |
|
This is repeated until the controller Flow concludes on an answer." |
|
- `max_rounds` (int): The maximum number of rounds the flow can run for. |
|
Default: 30. |
|
- `subflows_config` (Dict[str,Any]): A dictionary of the subflows configurations. Default: |
|
        - `Controller`: The configuration of the Controller Flow. By default, it's a ControllerAtomicFlow. Default parameters:
|
- `finish` (Dict[str,Any]): The configuration of the finish command. Default parameters: |
|
- `description` (str): The description of the command. |
|
Default: "Signal that the objective has been satisfied, and returns the answer to the user." |
|
- `input_args` (List[str]): The input arguments of the command. Default: ["answer"] |
|
- All other parameters are inherited from the default configuration of ControllerAtomicFlow (see ControllerAtomicFlow) |
|
        - `Executor`: The configuration of the Executor Flow. By default, it's a BranchingFlow. There are no default parameters; the flow
          parameter to be defined is the following (an illustrative override sketch is shown after this list):
|
- `subflows_config` (Dict[str,Any]): A dictionary of the configuration of the subflows of the branching flow. |
|
These subflows are typically also the possible commands of the Controller Flow. Default: [] |
|
- `early_exit_key` (str): The key that is used to exit the flow. Default: "EARLY_EXIT" |
|
    - `topology` (str): The topology of the flow, which is "circular".
|
By default, the topology is the one shown in the illustration above |
|
(the topology is also described in ControllerExecutorFlow.yaml). |
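
    For illustration only, an override of these parameters (e.g. passed via the flow's YAML configuration or
    `flow_config`) could look like the sketch below. The `wiki_search` branch name is a hypothetical example and
    not part of the default configuration:

        overrides = {
            "max_rounds": 10,
            "subflows_config": {
                "Executor": {
                    "subflows_config": {
                        # hypothetical branch; it is typically also exposed to the Controller as a command
                        "wiki_search": {},
                    },
                },
            },
        }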
|
|
|
|
|
*Input Interface*: |
|
|
|
- `goal` (str): The goal of the controller. Usually asked by the user/human (e.g. "I want to know the occupation and birth date of Michael Jordan.") |
|
|
|
*Output Interface*: |
|
|
|
    - `answer` (str): The answer of the flow to the query (e.g. "Michael Jordan is a basketball player and businessman. He was born on February 17, 1963.")
|
- `status` (str): The status of the flow. It can be "finished" or "unfinished". If the status is "unfinished", it's usually because the maximum amount of rounds was reached before the model found an answer. |
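
    Illustrative data shapes only (the values reuse the example above and the default "unfinished" answer; they are not actual outputs):

        input:  {"goal": "I want to know the occupation and birth date of Michael Jordan."}
        output: {"answer": "Michael Jordan is a basketball player and businessman. He was born on February 17, 1963.", "status": "finished"}
        output: {"answer": "The maximum amount of rounds was reached before the model found an answer.", "status": "unfinished"}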
|
|
|
:param flow_config: The configuration of the flow (see Configuration Parameters). |
|
    :param subflows: A list of subflows. Required when instantiating the flow programmatically (it replaces subflows_config from flow_config).
|
""" |
|
|
|
def __init__(self,**kwargs): |
|
super().__init__(**kwargs) |
|
|
|
self.input_interface_controller = KeyInterface( |
|
keys_to_select = ["goal","observation"], |
|
) |
|
self.input_interface_first_round_controller = KeyInterface( |
|
keys_to_select = ["goal"], |
|
) |
|
|
|
self.reply_interface = KeyInterface( |
|
keys_to_select = ["answer","status"], |
|
) |
|
|
|
self.next_state = { |
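            # Circular topology: the Controller decides on a command, the Executor runs it,
            # and control returns to the Controller, until max_rounds is reached or "finish" is called.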
|
None: "Controller", |
|
"Controller": "Executor", |
|
"Executor": "Controller" |
|
} |
|
|
|
def generate_reply(self): |
|
""" This method generates the reply of the flow. It's called when the flow is finished. """ |
|
|
|
reply = self.package_output_message( |
|
input_message = self.flow_state["input_message"], |
|
response = self.reply_interface(self.flow_state) |
|
) |
|
self.send_message(reply) |
|
|
|
def get_next_state(self): |
|
""" """ |
|
if self.flow_config["max_rounds"] is not None and self.flow_state["current_round"] >= self.flow_config["max_rounds"]: |
|
return None |
|
|
|
return self.next_state[self.flow_state["last_state"]] |
|
|
|
def _on_reach_max_round(self): |
|
""" This method is called when the flow reaches the maximum amount of rounds. It updates the state of the flow and starts the process of terminating the flow.""" |
|
self._state_update_dict({ |
|
"answer": "The maximum amount of rounds was reached before the model found an answer.", |
|
"status": "unfinished" |
|
}) |
|
|
|
def set_up_flow_state(self): |
|
""" Sets up the flow state.""" |
|
super().set_up_flow_state() |
|
self.flow_state["last_state"] = None |
|
self.flow_state["current_round"] = 0 |
|
|
|
|
|
def call_controller(self): |
|
""" Calls the controller: the flow that decides which command to call next.""" |
|
|
|
if self.flow_state["current_round"] == 0: |
|
input_interface = self.input_interface_first_round_controller |
|
else: |
|
input_interface = self.input_interface_controller |
|
|
|
message = self.package_input_message( |
|
data = input_interface(self.flow_state), |
|
dst_flow = "Controller" |
|
) |
|
|
|
self.subflows["Controller"].get_reply( |
|
message, |
|
) |
|
|
|
def call_executor(self): |
|
""" Calls the flow that executes the command instructed by the ControllerAtomicFlow.""" |
|
|
|
|
|
executor_branch_to_call = self.flow_state["command"] |
|
message = self.package_input_message( |
|
data = self.flow_state["command_args"], |
|
dst_flow = executor_branch_to_call |
|
) |
|
|
|
self.subflows[executor_branch_to_call].get_reply( |
|
message, |
|
) |
|
|
|
|
|
def register_data_to_state(self, input_message): |
|
"""This method registers the input message data to the flow state. It's everytime a new input message is received. |
|
|
|
:param input_message: The input message |
|
:type input_message: FlowMessage |
|
""" |
|
last_state = self.flow_state["last_state"] |
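        # Three cases, depending on which subflow (if any) produced this message:
        #   None       -> fresh external request: store the message and its goal
        #   "Executor" -> the executed command's result becomes the Controller's next observation
        #   otherwise  -> the Controller replied: store the chosen command and its arguments
        #                 (and flag an early exit when the command is "finish")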
|
|
|
if last_state is None: |
|
self.flow_state["input_message"] = input_message |
|
self.flow_state["goal"] = input_message.data["goal"] |
|
|
|
elif last_state == "Executor": |
|
self.flow_state["observation"] = input_message.data |
|
|
|
else: |
|
|
|
self._state_update_dict( |
|
{ |
|
"command": input_message.data["command"], |
|
"command_args": input_message.data["command_args"] |
|
} |
|
) |
|
|
|
|
|
if self.flow_state["command"] == "finish": |
|
|
|
self._state_update_dict( |
|
{ |
|
"early_exit_flag": True, |
|
"answer": self.flow_state["command_args"]["answer"], |
|
"status": "finished" |
|
} |
|
) |
|
|
|
|
|
|
|
|
|
def run(self,input_message: FlowMessage): |
|
""" Runs the WikiSearch Atomic Flow. It's used to execute a Wikipedia search and get page summaries. |
|
|
|
:param input_message: The input message |
|
:type input_message: FlowMessage |
|
""" |
|
|
|
self.register_data_to_state(input_message) |
|
|
|
current_state = self.get_next_state() |
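        # Dispatch on the next state: reply immediately if the Controller called "finish",
        # otherwise call the next subflow; a None state means max_rounds was reached.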
|
|
|
if self.flow_state.get("early_exit_flag",False): |
|
self.generate_reply() |
|
|
|
elif current_state == "Controller": |
|
self.call_controller() |
|
|
|
elif current_state == "Executor": |
|
self.call_executor() |
|
self.flow_state["current_round"] += 1 |
|
|
|
else: |
|
self._on_reach_max_round() |
|
self.generate_reply() |
|
|
|
|
|
if self.flow_state.get("early_exit_flag",False) or current_state is None: |
|
self.flow_state["last_state"] = None |
|
else: |
|
self.flow_state["last_state"] = current_state |
|
|