Update app.py
app.py
CHANGED
@@ -9,12 +9,8 @@ from langchain.memory import ConversationBufferMemory as MEM,RedisChatMessageHistory
 from langchain.schema import SystemMessage as SM,HumanMessage as HM, AIMessage as AM
 from langchain import hub
 import os
-
+import torch
 
-llm1 = Llama.from_pretrained(
-    repo_id="peterpeter8585/deepseek-llm-7b-chat-Q8_0-GGUF",
-    filename="deepseek-llm-7b-chat-q8_0.gguf",
-)
 
 from langchain_core.prompts.chat import ChatPromptTemplate, MessagesPlaceholder
 system = '''Respond to the human as helpfully and accurately as possible. You have access to the following tools:
@@ -93,6 +89,7 @@ class Chatchat(BaseChatModel):
         self.tokenizer = AutoTokenizer.from_pretrained(self.model_name, trust_remote_code=True)
         self.model = AutoModelForCausalLM.from_pretrained(
             self.model_name, trust_remote_code=True)
+        self.model=self
 
 
     def _call(
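For reference, the block removed in the first hunk loads a GGUF checkpoint through llama-cpp-python's Llama.from_pretrained. Below is a minimal sketch of how that loader is typically driven, assuming llama-cpp-python 0.2+; the chat message and max_tokens are illustrative and not part of this commit.

from llama_cpp import Llama

# Download the GGUF file from the Hub and load it (the call this commit removes).
llm1 = Llama.from_pretrained(
    repo_id="peterpeter8585/deepseek-llm-7b-chat-Q8_0-GGUF",
    filename="deepseek-llm-7b-chat-q8_0.gguf",
)

# OpenAI-style chat completion; the prompt here is illustrative only.
out = llm1.create_chat_completion(
    messages=[{"role": "user", "content": "Hello"}],
    max_tokens=64,
)
print(out["choices"][0]["message"]["content"])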
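The second hunk edits a custom LangChain chat model (Chatchat) that pairs an AutoTokenizer with an AutoModelForCausalLM. The rest of the class is not shown in this diff, so the following is only a hedged sketch of the usual transformers generation path a _call method like this wraps; model_name, the function name, and the prompt handling are placeholders rather than values taken from the commit.

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

model_name = "deepseek-ai/deepseek-llm-7b-chat"  # placeholder checkpoint, not from the diff
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)

def call(prompt: str, max_new_tokens: int = 64) -> str:
    # Tokenize the prompt and generate a continuation without tracking gradients.
    inputs = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        output_ids = model.generate(**inputs, max_new_tokens=max_new_tokens)
    # Decode only the newly generated tokens, dropping the prompt portion.
    new_tokens = output_ids[0][inputs["input_ids"].shape[1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)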