modified for new backend
- .gitignore +1 -0
- ChromaDBFlow.py +26 -3
- ChromaDBFlow.yaml +4 -0
- VectorStoreFlow.py +24 -7
- VectorStoreFlow.yaml +4 -0
.gitignore
ADDED
@@ -0,0 +1 @@
+__pycache__/*
ChromaDBFlow.py
CHANGED
@@ -2,21 +2,44 @@ import os
 from typing import Dict, List, Any
 
 import uuid
-
+from copy import deepcopy
 from langchain.embeddings import OpenAIEmbeddings
 
 from chromadb import Client as ChromaClient
 
 from flows.base_flows import AtomicFlow
 
+import hydra
 
 class ChromaDBFlow(AtomicFlow):
 
-    def __init__(self, **kwargs):
+    def __init__(self, backend,**kwargs):
         super().__init__(**kwargs)
         self.client = ChromaClient()
         self.collection = self.client.get_or_create_collection(name=self.flow_config["name"])
+        self.backend = backend
+
+    @classmethod
+    def _set_up_backend(cls, config):
+        kwargs = {}
+
+        kwargs["backend"] = \
+            hydra.utils.instantiate(config['backend'], _convert_="partial")
+
+        return kwargs
+
+    @classmethod
+    def instantiate_from_config(cls, config):
+        flow_config = deepcopy(config)
+
+        kwargs = {"flow_config": flow_config}
+
+        # ~~~ Set up backend ~~~
+        kwargs.update(cls._set_up_backend(flow_config))
 
+        # ~~~ Instantiate flow ~~~
+        return cls(**kwargs)
+
     def get_input_keys(self) -> List[str]:
         return self.flow_config["input_keys"]
 
@@ -25,7 +48,7 @@ class ChromaDBFlow(AtomicFlow):
 
     def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]:
 
-        api_information = self.
+        api_information = self.backend.get_key()
 
         if api_information.backend_used == "openai":
             embeddings = OpenAIEmbeddings(openai_api_key=api_information.api_key)
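For context on what the new constructor argument must provide: the updated run() only calls self.backend.get_key() and then reads backend_used and api_key off the returned object. A minimal stand-in that satisfies that contract (FakeApiInfo and FakeBackend are hypothetical names for local testing, not part of the flows package) could look like this:

    from dataclasses import dataclass


    @dataclass
    class FakeApiInfo:
        # Only the two attributes that ChromaDBFlow.run() reads.
        backend_used: str
        api_key: str


    class FakeBackend:
        """Stand-in mimicking the get_key() interface the flow expects from its backend."""

        def __init__(self, api_info: FakeApiInfo):
            self._api_info = api_info

        def get_key(self) -> FakeApiInfo:
            return self._api_info


    backend = FakeBackend(FakeApiInfo(backend_used="openai", api_key="sk-..."))
    info = backend.get_key()
    assert info.backend_used == "openai"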
ChromaDBFlow.yaml
CHANGED
@@ -1,6 +1,10 @@
 name: chroma_db
 description: ChromaDB is a document store that uses vector embeddings to store and retrieve documents
 
+backend:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
+
 input_keys:
 - operation
 - content
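A rough instantiation sketch for the config above, assuming the flow class is importable as ChromaDBFlow (the import path depends on how the module is installed) and assuming a structure for the api_infos entries, which this diff does not show. Only the ??? placeholder handling and the instantiate_from_config() call come from the files in this commit:

    from omegaconf import OmegaConf

    from ChromaDBFlow import ChromaDBFlow  # import path is an assumption

    cfg = OmegaConf.load("ChromaDBFlow.yaml")

    # api_infos is declared as ??? (mandatory/missing), so it must be supplied before use.
    assert OmegaConf.is_missing(cfg.backend, "api_infos")
    cfg.backend.api_infos = [
        {"backend_used": "openai", "api_key": "${oc.env:OPENAI_API_KEY}"}  # assumed structure
    ]

    # instantiate_from_config() deep-copies the config, builds the backend via
    # hydra.utils.instantiate(cfg.backend), and passes it to __init__ as `backend`.
    flow = ChromaDBFlow.instantiate_from_config(cfg)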
VectorStoreFlow.py
CHANGED
@@ -10,20 +10,33 @@ from langchain.vectorstores import Chroma, FAISS
 from langchain.vectorstores.base import VectorStoreRetriever
 
 from flows.base_flows import AtomicFlow
+import hydra
 
 
 class VectorStoreFlow(AtomicFlow):
-    REQUIRED_KEYS_CONFIG = ["type"
+    REQUIRED_KEYS_CONFIG = ["type"]
 
     vector_db: VectorStoreRetriever
 
-    def __init__(self, vector_db, **kwargs):
+    def __init__(self, backend,vector_db, **kwargs):
         super().__init__(**kwargs)
         self.vector_db = vector_db
 
+
     @classmethod
-    def
-
+    def _set_up_backend(cls, config):
+        kwargs = {}
+
+        kwargs["backend"] = \
+            hydra.utils.instantiate(config['backend'], _convert_="partial")
+
+        return kwargs
+
+
+    @classmethod
+    def _set_up_retriever(cls, api_information,config: Dict[str, Any]) -> Dict[str, Any]:
+
+        embeddings = OpenAIEmbeddings(openai_api_key=api_information.api_key)
         kwargs = {}
 
         vs_type = config["type"]
@@ -50,9 +63,13 @@ class VectorStoreFlow(AtomicFlow):
         flow_config = deepcopy(config)
 
         kwargs = {"flow_config": flow_config}
-
-
-
+
+        # ~~~ Set up backend ~~~
+        kwargs.update(cls._set_up_backend(flow_config))
+        api_information = kwargs["backend"].get_key()
+
+        kwargs.update(cls._set_up_retriever(api_information,flow_config))
+
         return cls(**kwargs)
 
     @staticmethod
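The hunk above truncates the remainder of _set_up_retriever, so the following branch-on-type sketch is only a guess at its shape, pieced together from the visible lines (the OpenAIEmbeddings call, kwargs, vs_type = config["type"]) and the Chroma/FAISS imports at the top of the file; details such as collection_name=config["name"] are assumptions, not the module's actual code:

    from typing import Any, Dict

    from langchain.embeddings import OpenAIEmbeddings
    from langchain.vectorstores import Chroma, FAISS


    def set_up_retriever_sketch(api_information, config: Dict[str, Any]) -> Dict[str, Any]:
        embeddings = OpenAIEmbeddings(openai_api_key=api_information.api_key)
        kwargs = {}

        vs_type = config["type"]
        if vs_type == "chroma":
            vector_db = Chroma(collection_name=config["name"], embedding_function=embeddings)
        elif vs_type == "faiss":
            # FAISS needs at least one text to build an index from.
            vector_db = FAISS.from_texts(texts=[""], embedding=embeddings)
        else:
            raise ValueError(f"Unsupported vector store type: {vs_type}")

        # The retriever lands in kwargs["vector_db"], matching __init__(self, backend, vector_db, **kwargs).
        kwargs["vector_db"] = vector_db.as_retriever()
        return kwargs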
VectorStoreFlow.yaml
CHANGED
@@ -1,6 +1,10 @@
 name: "VectorStoreFlow"
 description: "VectorStoreFlow"
 
+backend:
+  _target_: flows.backends.llm_lite.LiteLLMBackend
+  api_infos: ???
+
 input_keys:
 - "operation" # read or write
 - "content"
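Given the input_keys declared above, a write-then-read exchange with an instantiated VectorStoreFlow would presumably take the shape below; the structure of the returned dictionary is defined in parts of VectorStoreFlow.run() not shown in this diff:

    # `flow` is assumed to be a VectorStoreFlow built via instantiate_from_config(cfg),
    # analogous to the ChromaDBFlow sketch above.
    flow.run({"operation": "write", "content": "Flows are composable building blocks."})
    result = flow.run({"operation": "read", "content": "What are flows?"})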