Spaces:
Sleeping
Sleeping
Chen
committed on
Commit
·
b404642
1
Parent(s):
d043c95
reformat
Browse files- app.py +3 -1
- core/helper.py +12 -5
- core/lifecycle.py +2 -2
- langchain_manager/manager.py +1 -1
- llama/context.py +37 -19
- llama/data_loader.py +7 -7
- xpipe_wiki/manager_factory.py +14 -9
- xpipe_wiki/robot_manager.py +19 -5
app.py
CHANGED
@@ -28,7 +28,9 @@ with st.sidebar:
|
|
28 |
|
29 |
def main() -> None:
|
30 |
st.header("X-Pipe Wiki 机器人 💬")
|
31 |
-
robot_manager = XPipeRobotManagerFactory.get_or_create(
|
|
|
|
|
32 |
robot = robot_manager.get_robot()
|
33 |
query = st.text_input("X-Pipe Wiki 问题:")
|
34 |
if query:
|
|
|
28 |
|
29 |
def main() -> None:
|
30 |
st.header("X-Pipe Wiki 机器人 💬")
|
31 |
+
robot_manager = XPipeRobotManagerFactory.get_or_create(
|
32 |
+
XPipeRobotRevision.SIMPLE_OPENAI_VERSION_0
|
33 |
+
)
|
34 |
robot = robot_manager.get_robot()
|
35 |
query = st.text_input("X-Pipe Wiki 问题:")
|
36 |
if query:
|
core/helper.py
CHANGED
@@ -2,23 +2,30 @@ from core.lifecycle import Lifecycle
|
|
2 |
|
3 |
|
4 |
class LifecycleHelper:
|
5 |
-
|
6 |
@classmethod
|
7 |
def initialize_if_possible(cls, ls: Lifecycle) -> None:
|
8 |
-
if isinstance(ls, Lifecycle) and ls.get_lifecycle_state().can_initialize(
|
|
|
|
|
9 |
ls.initialize()
|
10 |
|
11 |
@classmethod
|
12 |
def start_if_possible(cls, ls: Lifecycle) -> None:
|
13 |
-
if isinstance(ls, Lifecycle) and ls.get_lifecycle_state().can_start(
|
|
|
|
|
14 |
ls.start()
|
15 |
|
16 |
@classmethod
|
17 |
def stop_if_possible(cls, ls: Lifecycle) -> None:
|
18 |
-
if isinstance(ls, Lifecycle) and ls.get_lifecycle_state().can_stop(
|
|
|
|
|
19 |
ls.stop()
|
20 |
|
21 |
@classmethod
|
22 |
def dispose_if_possible(cls, ls: Lifecycle) -> None:
|
23 |
-
if isinstance(ls, Lifecycle) and ls.get_lifecycle_state().can_dispose(
|
|
|
|
|
24 |
ls.dispose()
|
|
|
2 |
|
3 |
|
4 |
class LifecycleHelper:
|
|
|
5 |
@classmethod
|
6 |
def initialize_if_possible(cls, ls: Lifecycle) -> None:
|
7 |
+
if isinstance(ls, Lifecycle) and ls.get_lifecycle_state().can_initialize(
|
8 |
+
ls.lifecycle_state.phase
|
9 |
+
):
|
10 |
ls.initialize()
|
11 |
|
12 |
@classmethod
|
13 |
def start_if_possible(cls, ls: Lifecycle) -> None:
|
14 |
+
if isinstance(ls, Lifecycle) and ls.get_lifecycle_state().can_start(
|
15 |
+
ls.lifecycle_state.phase
|
16 |
+
):
|
17 |
ls.start()
|
18 |
|
19 |
@classmethod
|
20 |
def stop_if_possible(cls, ls: Lifecycle) -> None:
|
21 |
+
if isinstance(ls, Lifecycle) and ls.get_lifecycle_state().can_stop(
|
22 |
+
ls.lifecycle_state.phase
|
23 |
+
):
|
24 |
ls.stop()
|
25 |
|
26 |
@classmethod
|
27 |
def dispose_if_possible(cls, ls: Lifecycle) -> None:
|
28 |
+
if isinstance(ls, Lifecycle) and ls.get_lifecycle_state().can_dispose(
|
29 |
+
ls.lifecycle_state.phase
|
30 |
+
):
|
31 |
ls.dispose()
|
core/lifecycle.py
CHANGED
@@ -112,7 +112,7 @@ class LifecycleController(ABC):
|
|
112 |
|
113 |
def can_start(self, phase: Optional[LifecyclePhase]) -> bool:
|
114 |
return phase is not None and (
|
115 |
-
|
116 |
)
|
117 |
|
118 |
def can_stop(self, phase: Optional[LifecyclePhase]) -> bool:
|
@@ -120,7 +120,7 @@ class LifecycleController(ABC):
|
|
120 |
|
121 |
def can_dispose(self, phase: Optional[LifecyclePhase]) -> bool:
|
122 |
return phase is not None and (
|
123 |
-
|
124 |
)
|
125 |
|
126 |
|
|
|
112 |
|
113 |
def can_start(self, phase: Optional[LifecyclePhase]) -> bool:
|
114 |
return phase is not None and (
|
115 |
+
phase == LifecyclePhase.INITIALIZED or phase == LifecyclePhase.STOPPED
|
116 |
)
|
117 |
|
118 |
def can_stop(self, phase: Optional[LifecyclePhase]) -> bool:
|
|
|
120 |
|
121 |
def can_dispose(self, phase: Optional[LifecyclePhase]) -> bool:
|
122 |
return phase is not None and (
|
123 |
+
phase == LifecyclePhase.INITIALIZED or phase == LifecyclePhase.STOPPED
|
124 |
)
|
125 |
|
126 |
|
langchain_manager/manager.py
CHANGED
@@ -8,7 +8,7 @@ from langchain_manager.llms import AzureOpenAI
|
|
8 |
from core.lifecycle import Lifecycle
|
9 |
|
10 |
|
11 |
-
class BaseLangChainManager(
|
12 |
def __init__(self) -> None:
|
13 |
super().__init__()
|
14 |
|
|
|
8 |
from core.lifecycle import Lifecycle
|
9 |
|
10 |
|
11 |
+
class BaseLangChainManager(ABC):
|
12 |
def __init__(self) -> None:
|
13 |
super().__init__()
|
14 |
|
llama/context.py
CHANGED
@@ -5,10 +5,10 @@ from llama_index import StorageContext
|
|
5 |
|
6 |
from core.lifecycle import Lifecycle
|
7 |
from langchain_manager.manager import BaseLangChainManager
|
|
|
8 |
|
9 |
|
10 |
class ServiceContextManager(Lifecycle, ABC):
|
11 |
-
|
12 |
@abstractmethod
|
13 |
def get_service_context(self) -> ServiceContext:
|
14 |
pass
|
@@ -44,23 +44,33 @@ class AzureServiceContextManager(ServiceContextManager):
|
|
44 |
)
|
45 |
|
46 |
def do_start(self) -> None:
|
47 |
-
self.logger.info(
|
48 |
-
|
49 |
-
|
50 |
-
|
|
|
|
|
|
|
|
|
51 |
|
52 |
def do_stop(self) -> None:
|
53 |
-
self.logger.info(
|
54 |
-
|
55 |
-
|
56 |
-
|
|
|
|
|
|
|
|
|
57 |
|
58 |
def do_dispose(self) -> None:
|
59 |
-
self.logger.info(
|
|
|
|
|
|
|
60 |
|
61 |
|
62 |
class StorageContextManager(Lifecycle, ABC):
|
63 |
-
|
64 |
@abstractmethod
|
65 |
def get_storage_context(self) -> StorageContext:
|
66 |
pass
|
@@ -69,9 +79,11 @@ class StorageContextManager(Lifecycle, ABC):
|
|
69 |
class LocalStorageContextManager(StorageContextManager):
|
70 |
storage_context: StorageContext
|
71 |
|
72 |
-
def __init__(
|
73 |
-
|
74 |
-
|
|
|
|
|
75 |
super().__init__()
|
76 |
self.dataset_path = dataset_path
|
77 |
self.service_context_manager = service_context_manager
|
@@ -81,8 +93,11 @@ class LocalStorageContextManager(StorageContextManager):
|
|
81 |
|
82 |
def do_init(self) -> None:
|
83 |
from llama.utils import is_local_storage_files_ready
|
|
|
84 |
if is_local_storage_files_ready(self.dataset_path):
|
85 |
-
self.storage_context = StorageContext.from_defaults(
|
|
|
|
|
86 |
else:
|
87 |
docs = self._download()
|
88 |
self._indexing(docs)
|
@@ -96,14 +111,17 @@ class LocalStorageContextManager(StorageContextManager):
|
|
96 |
def do_dispose(self) -> None:
|
97 |
self.storage_context.persist(self.dataset_path)
|
98 |
|
99 |
-
def _download(self) -> [Document]:
|
100 |
from llama.data_loader import GithubLoader
|
|
|
101 |
loader = GithubLoader()
|
102 |
return loader.load()
|
103 |
|
104 |
-
def _indexing(self, docs: [Document]):
|
105 |
from llama_index import GPTVectorStoreIndex
|
106 |
-
|
107 |
-
|
|
|
|
|
108 |
index.storage_context.persist(persist_dir=self.dataset_path)
|
109 |
self.storage_context = index.storage_context
|
|
|
5 |
|
6 |
from core.lifecycle import Lifecycle
|
7 |
from langchain_manager.manager import BaseLangChainManager
|
8 |
+
from typing import List
|
9 |
|
10 |
|
11 |
class ServiceContextManager(Lifecycle, ABC):
|
|
|
12 |
@abstractmethod
|
13 |
def get_service_context(self) -> ServiceContext:
|
14 |
pass
|
|
|
44 |
)
|
45 |
|
46 |
def do_start(self) -> None:
|
47 |
+
self.logger.info(
|
48 |
+
"[do_start][embedding] last used usage: %d",
|
49 |
+
self.service_context.embed_model.total_tokens_used,
|
50 |
+
)
|
51 |
+
self.logger.info(
|
52 |
+
"[do_start][predict] last used usage: %d",
|
53 |
+
self.service_context.llm_predictor.total_tokens_used,
|
54 |
+
)
|
55 |
|
56 |
def do_stop(self) -> None:
|
57 |
+
self.logger.info(
|
58 |
+
"[do_stop][embedding] last used usage: %d",
|
59 |
+
self.service_context.embed_model.total_tokens_used,
|
60 |
+
)
|
61 |
+
self.logger.info(
|
62 |
+
"[do_stop][predict] last used usage: %d",
|
63 |
+
self.service_context.llm_predictor.total_tokens_used,
|
64 |
+
)
|
65 |
|
66 |
def do_dispose(self) -> None:
|
67 |
+
self.logger.info(
|
68 |
+
"[do_dispose] total used token: %d",
|
69 |
+
self.service_context.llm_predictor.total_tokens_used,
|
70 |
+
)
|
71 |
|
72 |
|
73 |
class StorageContextManager(Lifecycle, ABC):
|
|
|
74 |
@abstractmethod
|
75 |
def get_storage_context(self) -> StorageContext:
|
76 |
pass
|
|
|
79 |
class LocalStorageContextManager(StorageContextManager):
|
80 |
storage_context: StorageContext
|
81 |
|
82 |
+
def __init__(
|
83 |
+
self,
|
84 |
+
service_context_manager: ServiceContextManager,
|
85 |
+
dataset_path: str = "./dataset",
|
86 |
+
) -> None:
|
87 |
super().__init__()
|
88 |
self.dataset_path = dataset_path
|
89 |
self.service_context_manager = service_context_manager
|
|
|
93 |
|
94 |
def do_init(self) -> None:
|
95 |
from llama.utils import is_local_storage_files_ready
|
96 |
+
|
97 |
if is_local_storage_files_ready(self.dataset_path):
|
98 |
+
self.storage_context = StorageContext.from_defaults(
|
99 |
+
persist_dir=self.dataset_path
|
100 |
+
)
|
101 |
else:
|
102 |
docs = self._download()
|
103 |
self._indexing(docs)
|
|
|
111 |
def do_dispose(self) -> None:
|
112 |
self.storage_context.persist(self.dataset_path)
|
113 |
|
114 |
+
def _download(self) -> List[Document]:
|
115 |
from llama.data_loader import GithubLoader
|
116 |
+
|
117 |
loader = GithubLoader()
|
118 |
return loader.load()
|
119 |
|
120 |
+
def _indexing(self, docs: List[Document]) -> None:
|
121 |
from llama_index import GPTVectorStoreIndex
|
122 |
+
|
123 |
+
index = GPTVectorStoreIndex.from_documents(
|
124 |
+
docs, service_context=self.service_context_manager.get_service_context()
|
125 |
+
)
|
126 |
index.storage_context.persist(persist_dir=self.dataset_path)
|
127 |
self.storage_context = index.storage_context
|
llama/data_loader.py
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
import os
|
2 |
import pickle
|
3 |
from abc import abstractmethod, ABC
|
4 |
-
from typing import Optional, Sequence
|
5 |
|
6 |
from llama_hub.github_repo import GithubRepositoryReader, GithubClient
|
7 |
from llama_index import download_loader
|
@@ -10,16 +10,16 @@ from llama_index.readers.schema.base import Document
|
|
10 |
|
11 |
class WikiLoader(ABC):
|
12 |
@abstractmethod
|
13 |
-
def load(self) -> [Document]:
|
14 |
pass
|
15 |
|
16 |
|
17 |
class GithubLoader(WikiLoader):
|
18 |
def __init__(
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
):
|
24 |
super().__init__()
|
25 |
self.owner = (
|
@@ -28,7 +28,7 @@ class GithubLoader(WikiLoader):
|
|
28 |
self.repo = repo if repo is not None else os.environ["GITHUB_REPO"]
|
29 |
self.dirs = dirs if dirs is not None else [".", "doc"]
|
30 |
|
31 |
-
def load(self) -> [Document]:
|
32 |
download_loader("GithubRepositoryReader")
|
33 |
docs = None
|
34 |
if os.path.exists("docs/docs.pkl"):
|
|
|
1 |
import os
|
2 |
import pickle
|
3 |
from abc import abstractmethod, ABC
|
4 |
+
from typing import Optional, Sequence, List
|
5 |
|
6 |
from llama_hub.github_repo import GithubRepositoryReader, GithubClient
|
7 |
from llama_index import download_loader
|
|
|
10 |
|
11 |
class WikiLoader(ABC):
|
12 |
@abstractmethod
|
13 |
+
def load(self) -> List[Document]:
|
14 |
pass
|
15 |
|
16 |
|
17 |
class GithubLoader(WikiLoader):
|
18 |
def __init__(
|
19 |
+
self,
|
20 |
+
github_owner: Optional[str] = None,
|
21 |
+
repo: Optional[str] = None,
|
22 |
+
dirs: Optional[Sequence[str]] = None,
|
23 |
):
|
24 |
super().__init__()
|
25 |
self.owner = (
|
|
|
28 |
self.repo = repo if repo is not None else os.environ["GITHUB_REPO"]
|
29 |
self.dirs = dirs if dirs is not None else [".", "doc"]
|
30 |
|
31 |
+
def load(self) -> List[Document]:
|
32 |
download_loader("GithubRepositoryReader")
|
33 |
docs = None
|
34 |
if os.path.exists("docs/docs.pkl"):
|
xpipe_wiki/manager_factory.py
CHANGED
@@ -9,11 +9,10 @@ class XPipeRobotRevision(enum.Enum):
|
|
9 |
SIMPLE_OPENAI_VERSION_0 = 1
|
10 |
|
11 |
|
12 |
-
CAPABLE = dict
|
13 |
|
14 |
|
15 |
class XPipeRobotManagerFactory:
|
16 |
-
|
17 |
@classmethod
|
18 |
def get_or_create(cls, revision: XPipeRobotRevision) -> XPipeWikiRobotManager:
|
19 |
if CAPABLE.get(revision) is not None:
|
@@ -25,18 +24,24 @@ class XPipeRobotManagerFactory:
|
|
25 |
|
26 |
@classmethod
|
27 |
def create_simple_openai_version_0(cls) -> AzureXPipeWikiRobotManager:
|
28 |
-
|
29 |
from llama.context import AzureServiceContextManager
|
30 |
from langchain_manager.manager import LangChainAzureManager
|
31 |
-
|
|
|
|
|
|
|
32 |
|
33 |
from llama.context import LocalStorageContextManager
|
34 |
-
dataset_path = os.getenv("XPIPE_WIKI_DATASET_PATH", "./dataset")
|
35 |
-
storage_context_manager = LocalStorageContextManager(dataset_path=dataset_path,
|
36 |
-
service_context_manager=service_context_manager)
|
37 |
|
38 |
-
|
39 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
40 |
LifecycleHelper.initialize_if_possible(robot_manager)
|
41 |
LifecycleHelper.start_if_possible(robot_manager)
|
42 |
return robot_manager
|
|
|
9 |
SIMPLE_OPENAI_VERSION_0 = 1
|
10 |
|
11 |
|
12 |
+
CAPABLE = dict()
|
13 |
|
14 |
|
15 |
class XPipeRobotManagerFactory:
|
|
|
16 |
@classmethod
|
17 |
def get_or_create(cls, revision: XPipeRobotRevision) -> XPipeWikiRobotManager:
|
18 |
if CAPABLE.get(revision) is not None:
|
|
|
24 |
|
25 |
@classmethod
|
26 |
def create_simple_openai_version_0(cls) -> AzureXPipeWikiRobotManager:
|
|
|
27 |
from llama.context import AzureServiceContextManager
|
28 |
from langchain_manager.manager import LangChainAzureManager
|
29 |
+
|
30 |
+
service_context_manager = AzureServiceContextManager(
|
31 |
+
lc_manager=LangChainAzureManager()
|
32 |
+
)
|
33 |
|
34 |
from llama.context import LocalStorageContextManager
|
|
|
|
|
|
|
35 |
|
36 |
+
dataset_path = os.getenv("XPIPE_WIKI_DATASET_PATH", "./dataset")
|
37 |
+
storage_context_manager = LocalStorageContextManager(
|
38 |
+
dataset_path=dataset_path, service_context_manager=service_context_manager
|
39 |
+
)
|
40 |
+
|
41 |
+
robot_manager = AzureXPipeWikiRobotManager(
|
42 |
+
service_context_manager=service_context_manager,
|
43 |
+
storage_context_manager=storage_context_manager,
|
44 |
+
)
|
45 |
LifecycleHelper.initialize_if_possible(robot_manager)
|
46 |
LifecycleHelper.start_if_possible(robot_manager)
|
47 |
return robot_manager
|
xpipe_wiki/robot_manager.py
CHANGED
@@ -16,29 +16,43 @@ class XPipeWikiRobot(ABC):
|
|
16 |
pass
|
17 |
|
18 |
|
19 |
-
@dataclasses
|
20 |
class AzureOpenAIXPipeWikiRobot(XPipeWikiRobot):
|
21 |
query_engine: BaseQueryEngine
|
22 |
|
|
|
|
|
|
|
|
|
23 |
def ask(self, question: str) -> Any:
|
24 |
return self.query_engine.query(question)
|
25 |
|
26 |
|
27 |
class XPipeWikiRobotManager(Lifecycle):
|
28 |
-
|
29 |
@abstractmethod
|
30 |
def get_robot(self) -> XPipeWikiRobot:
|
31 |
pass
|
32 |
|
33 |
|
34 |
-
@dataclasses
|
35 |
class AzureXPipeWikiRobotManager(XPipeWikiRobotManager):
|
36 |
service_context_manager: ServiceContextManager
|
37 |
storage_context_manager: StorageContextManager
|
38 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
39 |
def get_robot(self) -> XPipeWikiRobot:
|
40 |
-
index = load_index_from_storage(
|
41 |
-
|
|
|
|
|
|
|
|
|
42 |
return AzureOpenAIXPipeWikiRobot(query_engine)
|
43 |
|
44 |
def do_init(self) -> None:
|
|
|
16 |
pass
|
17 |
|
18 |
|
|
|
19 |
class AzureOpenAIXPipeWikiRobot(XPipeWikiRobot):
|
20 |
query_engine: BaseQueryEngine
|
21 |
|
22 |
+
def __init__(self, query_engine: BaseQueryEngine) -> None:
|
23 |
+
super().__init__()
|
24 |
+
self.query_engine = query_engine
|
25 |
+
|
26 |
def ask(self, question: str) -> Any:
|
27 |
return self.query_engine.query(question)
|
28 |
|
29 |
|
30 |
class XPipeWikiRobotManager(Lifecycle):
|
|
|
31 |
@abstractmethod
|
32 |
def get_robot(self) -> XPipeWikiRobot:
|
33 |
pass
|
34 |
|
35 |
|
|
|
36 |
class AzureXPipeWikiRobotManager(XPipeWikiRobotManager):
|
37 |
service_context_manager: ServiceContextManager
|
38 |
storage_context_manager: StorageContextManager
|
39 |
|
40 |
+
def __init__(
|
41 |
+
self,
|
42 |
+
service_context_manager: ServiceContextManager,
|
43 |
+
storage_context_manager: StorageContextManager,
|
44 |
+
) -> None:
|
45 |
+
super().__init__()
|
46 |
+
self.service_context_manager = service_context_manager
|
47 |
+
self.storage_context_manager = storage_context_manager
|
48 |
+
|
49 |
def get_robot(self) -> XPipeWikiRobot:
|
50 |
+
index = load_index_from_storage(
|
51 |
+
storage_context=self.storage_context_manager.get_storage_context()
|
52 |
+
)
|
53 |
+
query_engine = index.as_query_engine(
|
54 |
+
service_context=self.service_context_manager.get_service_context()
|
55 |
+
)
|
56 |
return AzureOpenAIXPipeWikiRobot(query_engine)
|
57 |
|
58 |
def do_init(self) -> None:
|