Abhaykoul committed
Commit adff8c4 · verified · 1 Parent(s): b68d341

Update v2.py

Files changed (1)
  1. v2.py +36 -67
v2.py CHANGED
@@ -1,15 +1,11 @@
 import re
-from dotenv import load_dotenv
 import requests
 from uuid import uuid4
 import json
-from typing import Any, AsyncGenerator, Dict
-import os
-
-# Load environment variables from .env file
+from typing import Any, AsyncGenerator, Dict, Generator
+import os
+from dotenv import load_dotenv
 load_dotenv()
-
-
 class v2:
     def __init__(
         self,
@@ -17,7 +13,7 @@ class v2:
         proxies: dict = {},
     ):
         self.session = requests.Session()
-        self.chat_endpoint = os.getenv("v2")
+        self.chat_endpoint = os.get("v2")
         self.stream_chunk_size = 64
         self.timeout = timeout
         self.last_response = {}
@@ -31,14 +27,13 @@ class v2:
         self.session.headers.update(self.headers)
         self.session.proxies = proxies

-
     def ask(
         self,
         prompt: str,
         stream: bool = False,
         raw: bool = False,
-    ) -> dict:
-        conversation_prompt =f"Tell me evrything about {prompt}"
+    ) -> Generator[Dict[str, Any], None, None]:
+        conversation_prompt = f"Tell me everything about {prompt}"

         self.session.headers.update(self.headers)
         payload = {
@@ -53,75 +48,49 @@ class v2:
             "contexts_from": "google"
         }

-        def for_stream():
-            response = self.session.post(
-                self.chat_endpoint, json=payload, stream=True, timeout=self.timeout
+        response = self.session.post(
+            self.chat_endpoint, json=payload, stream=True, timeout=self.timeout
+        )
+        if not response.ok:
+            raise Exception(
+                f"Failed to generate response - ({response.status_code}, {response.reason}) - {response.text}"
             )
-            if not response.ok:
-                raise Exception(
-                    f"Failed to generate response - ({response.status_code}, {response.reason}) - {response.text}"
-                )
-
-            streaming_text = ""
-            for line in response.iter_lines(decode_unicode=True):
-                if line.startswith('data:'):
-                    try:
-                        data = json.loads(line[5:].strip())
-                        if data['type'] == 'answer' and 'text' in data['data']:
-                            new_text = data['data']['text']
-                            if len(new_text) > len(streaming_text):
-                                delta = new_text[len(streaming_text):]
-                                streaming_text = new_text
-                                resp = dict(text=delta)
-                                self.last_response.update(dict(text=streaming_text))
-                                yield line if raw else resp
-                    except json.JSONDecodeError:
-                        pass

-        def for_non_stream():
-            full_response = ""
-            for chunk in for_stream():
-                if not raw:
-                    full_response += chunk['text']
-            self.last_response = dict(text=full_response)
-            return self.last_response
-
-        return for_stream() if stream else for_non_stream()
+        streaming_text = ""
+        for line in response.iter_lines(decode_unicode=True):
+            if line.startswith('data:'):
+                try:
+                    data = json.loads(line[5:].strip())
+                    if data['type'] == 'answer' and 'text' in data['data']:
+                        new_text = data['data']['text']
+                        if len(new_text) > len(streaming_text):
+                            delta = new_text[len(streaming_text):]
+                            streaming_text = new_text
+                            resp = dict(text=delta)
+                            self.last_response.update(dict(text=streaming_text))
+                            yield line if raw else resp
+                except json.JSONDecodeError:
+                    pass

     def chat(
         self,
         prompt: str,
         stream: bool = False,
-    ) -> str:
-        def for_stream():
-            for response in self.ask(
-                prompt, True
-            ):
-                yield self.get_message(response)
-
-        def for_non_stream():
-            return self.get_message(
-                self.ask(
-                    prompt,
-                    False,
-                )
-            )
-
-        return for_stream() if stream else for_non_stream()
+    ) -> Generator[str, None, None]:
+        for response in self.ask(prompt, True):
+            yield self.format_text(self.get_message(response))
+        yield "[DONE]"

     def get_message(self, response: dict) -> str:
         assert isinstance(response, dict), "Response should be of dict data-type only"
-
+
         if "text" in response:
             text = re.sub(r'\[\[\d+\]\]', '', response["text"])
             return text
         else:
             return ""

-
-if __name__ == '__main__':
-    from rich import print
-    ai = v2()
-    response = ai.chat("HelpingAI-9B", stream=True)
-    for chunk in response:
-        print(chunk, end="", flush=True)
+    def format_text(self, text: str) -> str:
+        # Convert *text* to <i>text</i> for italic
+        text = re.sub(r'\*(.*?)\*', r'<i>\1</i>', text)
+        return text
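
For reference, a minimal usage sketch in the spirit of the __main__ block this commit removes (hypothetical, not part of the commit). It assumes a .env file whose v2 variable holds the chat endpoint URL and that the constructor reads it successfully (the removed line used os.getenv("v2") for this), and that rich is installed:

# Minimal usage sketch (hypothetical), mirroring the removed __main__ block.
# Assumes a .env file containing a line such as:  v2=<chat endpoint URL>
if __name__ == '__main__':
    from rich import print

    ai = v2()
    # chat() now always streams: it yields formatted text chunks, then a final "[DONE]" marker.
    for chunk in ai.chat("HelpingAI-9B", stream=True):
        if chunk == "[DONE]":
            break
        print(chunk, end="", flush=True)

Stopping at the "[DONE]" sentinel keeps the marker out of the printed answer; chat() always yields it last, so without the check it would be appended to the output.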