change configs + revision change for OpenAIChatFlowModule
- CF_Code.py +1 -1
- CF_Code.yaml +2 -2
- CF_CodeCritic.yaml +2 -2
- CF_CodeCriticWrongAttempt.yaml +2 -2
- CF_CodeCriticWrongAttemptWithPlan.yaml +2 -2
- CF_CodeDebug.yaml +2 -2
- CF_CodeDebugCollab.yaml +2 -2
- CF_CodeDebugCollabWithPlan.yaml +2 -2
- CF_CodeWithPlan.yaml +2 -2
- CF_PlanCritic.yaml +2 -2
- LC_Code.yaml +2 -2
- LC_CodeCritic.yaml +2 -2
- LC_CodeCriticWrongAttempt.yaml +2 -2
- LC_CodeDebug.yaml +2 -2
- LC_CodeDebugCollab.yaml +2 -2
- LC_CodeWithPlan.yaml +2 -2
- __init__.py +1 -1
CF_Code.py CHANGED
@@ -3,4 +3,4 @@ from flow_modules.aiflows.OpenAIChatFlowModule import OpenAIChatAtomicFlow
 
 class CF_Code(OpenAIChatAtomicFlow):
     def __init__(self, **kwargs):
-        super().__init__(**kwargs)
+        super().__init__(**kwargs)
CF_Code.yaml CHANGED
@@ -19,8 +19,8 @@ output_interface:
 # ~~~ Flow specification ~~~
 
 backend:
-
-  api_infos:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
   wait_time_per_key: 6
   model_name:
     openai: "gpt-4"
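The same two-line edit recurs in every config below: the backend block now names its implementation explicitly via _target_: flows.backends.llm_lite.LiteLLMBackend, and api_infos becomes the mandatory placeholder ???, so API credentials are supplied by the caller instead of being baked into the flow module. The sketch below assumes these YAML files are loaded as OmegaConf-style configs (which the _target_ and ??? conventions suggest); the shape of the api_infos override is illustrative, not taken from the diff.

# Minimal sketch, assuming OmegaConf-style loading: `???` marks a mandatory
# value that must be filled in before the backend can be instantiated.
# The api_infos payload (backend_used/api_key) is an illustrative guess.
from omegaconf import OmegaConf

default_cfg = OmegaConf.create("""
backend:
  _target_: flows.backends.llm_lite.LiteLLMBackend
  api_infos: ???
  wait_time_per_key: 6
  model_name:
    openai: "gpt-4"
""")

# The field is missing until the caller provides it.
assert OmegaConf.is_missing(default_cfg.backend, "api_infos")

# Merge in concrete API information at instantiation time.
overrides = {"backend": {"api_infos": [{"backend_used": "openai", "api_key": "sk-..."}]}}
cfg = OmegaConf.merge(default_cfg, OmegaConf.create(overrides))
print(OmegaConf.to_yaml(cfg.backend))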
CF_CodeCritic.yaml CHANGED
@@ -19,8 +19,8 @@ output_interface:
 
 # ~~~ Flow specification ~~~
 backend:
-
-  api_infos:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
   model_name:
     openai: "gpt-4"
     azure: "azure/gpt-4"
CF_CodeCriticWrongAttempt.yaml CHANGED
@@ -20,8 +20,8 @@ output_interface:
 
 # ~~~ Flow specification ~~~
 backend:
-
-  api_infos:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
   model_name:
     openai: "gpt-4"
     azure: "azure/gpt-4"
CF_CodeCriticWrongAttemptWithPlan.yaml CHANGED
@@ -21,8 +21,8 @@ output_interface:
 
 # ~~~ Flow specification ~~~
 backend:
-
-  api_infos:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
   model_name:
     openai: "gpt-4"
     azure: "azure/gpt-4"
CF_CodeDebug.yaml CHANGED
@@ -23,8 +23,8 @@ subflows_config:
     _target_: .CF_Code.instantiate_from_default_config
     name: "CodeGenerator"
     backend:
-
-      api_infos:
+      _target_: flows.backends.llm_lite.LiteLLMBackend
+      api_infos: ???
       model_name:
         openai: "gpt-4"
         azure: "azure/gpt-4"
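In the composite flows (CF_CodeDebug above, and the other Debug/Collab configs below), the same backend block sits inside subflows_config, under the entry that wraps the code generator, so the mandatory api_infos has to be supplied on a nested path. A minimal sketch, again assuming OmegaConf-style configs; the subflow key code_generator is hypothetical, since the hunk starts below the actual key.

# Hedged sketch: filling the nested mandatory field with a dot-path update.
# The subflow key "code_generator" is hypothetical (the diff hunk starts below
# the real key); OmegaConf usage is an assumption about how the configs load.
from omegaconf import OmegaConf

cfg = OmegaConf.create("""
subflows_config:
  code_generator:
    _target_: .CF_Code.instantiate_from_default_config
    name: "CodeGenerator"
    backend:
      _target_: flows.backends.llm_lite.LiteLLMBackend
      api_infos: ???
      model_name:
        openai: "gpt-4"
        azure: "azure/gpt-4"
""")

# Provide the API information for the nested subflow backend.
OmegaConf.update(cfg, "subflows_config.code_generator.backend.api_infos",
                 [{"backend_used": "azure", "api_key": "..."}])
assert not OmegaConf.is_missing(cfg.subflows_config.code_generator.backend, "api_infos")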
CF_CodeDebugCollab.yaml CHANGED
@@ -22,8 +22,8 @@ subflows_config:
     _target_: .CF_Code.instantiate_from_default_config
     name: "CodeGenerator"
     backend:
-
-      api_infos:
+      _target_: flows.backends.llm_lite.LiteLLMBackend
+      api_infos: ???
       model_name:
         openai: "gpt-4"
         azure: "azure/gpt-4"
CF_CodeDebugCollabWithPlan.yaml CHANGED
@@ -23,8 +23,8 @@ subflows_config:
     _target_: .CF_CodeWithPlan.instantiate_from_default_config
     name: "CodeGenerator"
     backend:
-
-      api_infos:
+      _target_: flows.backends.llm_lite.LiteLLMBackend
+      api_infos: ???
       model_name:
         openai: "gpt-4"
         azure: "azure/gpt-4"
CF_CodeWithPlan.yaml CHANGED
@@ -19,8 +19,8 @@ output_interface:
 
 # ~~~ Flow specification ~~~
 backend:
-
-  api_infos:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
   model_name:
     openai: "gpt-4"
     azure: "azure/gpt-4"
CF_PlanCritic.yaml CHANGED
@@ -19,8 +19,8 @@ output_interface:
 
 # ~~~ Flow specification ~~~
 backend:
-
-  api_infos:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
   model_name:
     openai: "gpt-4"
     azure: "azure/gpt-4"
LC_Code.yaml CHANGED
@@ -18,8 +18,8 @@ output_interface:
 
 # ~~~ Flow specification ~~~
 backend:
-
-  api_infos:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
   model_name:
     openai: "gpt-4"
     azure: "azure/gpt-4"
LC_CodeCritic.yaml CHANGED
@@ -19,8 +19,8 @@ output_interface:
 
 # ~~~ Flow specification ~~~
 backend:
-
-  api_infos:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
   model_name:
     openai: "gpt-4"
     azure: "azure/gpt-4"
LC_CodeCriticWrongAttempt.yaml CHANGED
@@ -20,8 +20,8 @@ output_interface:
 
 # ~~~ Flow specification ~~~
 backend:
-
-  api_infos:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
   model_name:
     openai: "gpt-4"
     azure: "azure/gpt-4"
LC_CodeDebug.yaml CHANGED
@@ -23,8 +23,8 @@ subflows_config:
     _target_: .LC_Code.instantiate_from_default_config
     name: "CodeGenerator"
     backend:
-
-      api_infos:
+      _target_: flows.backends.llm_lite.LiteLLMBackend
+      api_infos: ???
       model_name:
         openai: "gpt-4"
         azure: "azure/gpt-4"
LC_CodeDebugCollab.yaml CHANGED
@@ -22,8 +22,8 @@ subflows_config:
     _target_: .LC_Code.instantiate_from_default_config
     name: "CodeGenerator"
     backend:
-
-      api_infos:
+      _target_: flows.backends.llm_lite.LiteLLMBackend
+      api_infos: ???
       model_name:
         openai: "gpt-4"
         azure: "azure/gpt-4"
LC_CodeWithPlan.yaml CHANGED
@@ -19,8 +19,8 @@ output_interface:
 
 # ~~~ Flow specification ~~~
 backend:
-
-  api_infos:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
   model_name:
     openai: "gpt-4"
     azure: "azure/gpt-4"
__init__.py CHANGED
@@ -1,6 +1,6 @@
 # ~~~ Specify the dependencies ~~~
 dependencies = [
-    {"url": "aiflows/OpenAIChatFlowModule", "revision": "
+    {"url": "aiflows/OpenAIChatFlowModule", "revision": "eeec09b71e967ce426553e2300c5689f6ea6a662"},
     {"url": "aiflows/FixedReplyFlowModule", "revision": "65fbdbe19f5a8fdc48810810812552c5674d35a5"},
 ]
 
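The only change here is the pinned revision of aiflows/OpenAIChatFlowModule, bumped to eeec09b71e967ce426553e2300c5689f6ea6a662 alongside the LiteLLM backend changes above. Below is a minimal sketch of how such a dependency list is typically consumed, assuming the flow_verse.sync_dependencies helper and the flows package name; neither appears in the diff itself.

# Hedged sketch: syncing the pinned flow modules before importing them.
# "from flows import flow_verse" and sync_dependencies are assumptions about
# the library API at this revision; only the dependency dicts come from the diff.
from flows import flow_verse

dependencies = [
    {"url": "aiflows/OpenAIChatFlowModule", "revision": "eeec09b71e967ce426553e2300c5689f6ea6a662"},
    {"url": "aiflows/FixedReplyFlowModule", "revision": "65fbdbe19f5a8fdc48810810812552c5674d35a5"},
]

# Fetch (or update) the pinned revisions into the local flow_modules/ directory
# so that imports like the one in CF_Code.py resolve.
flow_verse.sync_dependencies(dependencies)

from flow_modules.aiflows.OpenAIChatFlowModule import OpenAIChatAtomicFlow  # noqa: E402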