File size: 4,710 Bytes
9e7090f |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 |
from abc import ABC
from abc import abstractmethod
from typing import Optional
class Provider(ABC):
    """Abstract base class for synchronous chat providers.

    Concrete providers must implement `ask` (full response payload),
    `chat` (text-only convenience wrapper), and `get_message`
    (payload-to-text extraction).
    """

    @abstractmethod
    def ask(
        self,
        prompt: str,
        stream: bool = False,
        raw: bool = False,
        # PEP 484: a default of None requires an explicit Optional annotation.
        optimizer: Optional[str] = None,
        conversationally: bool = False,
    ) -> dict:
        """Chat with AI.

        Args:
            prompt (str): Prompt to be sent.
            stream (bool, optional): Flag for streaming response. Defaults to False.
            raw (bool, optional): Stream back raw response as received. Defaults to False.
            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.

        Returns:
            dict: Provider response payload, e.g.

            ```json
            {
                "completion": "\nNext: domestic cat breeds with short hair >>",
                "stop_reason": null,
                "truncated": false,
                "stop": null,
                "model": "llama-2-13b-chat",
                "log_id": "cmpl-3kYiYxSNDvgMShSzFooz6t",
                "exception": null
            }
            ```
        """
        raise NotImplementedError("Method needs to be implemented in subclass")

    @abstractmethod
    def chat(
        self,
        prompt: str,
        stream: bool = False,
        optimizer: Optional[str] = None,
        conversationally: bool = False,
    ) -> str:
        """Generate response `str`.

        Args:
            prompt (str): Prompt to be sent.
            stream (bool, optional): Flag for streaming response. Defaults to False.
            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.

        Returns:
            str: Response generated.
        """
        raise NotImplementedError("Method needs to be implemented in subclass")

    @abstractmethod
    def get_message(self, response: dict) -> str:
        """Retrieve message only from a response payload.

        Args:
            response (dict): Response generated by `self.ask`.

        Returns:
            str: Message extracted.
        """
        raise NotImplementedError("Method needs to be implemented in subclass")
class AsyncProvider(ABC):
    """Abstract base class for asynchronous chat providers.

    Async counterpart of `Provider`: concrete providers must implement
    coroutine versions of `ask`, `chat`, and `get_message`.
    """

    @abstractmethod
    async def ask(
        self,
        prompt: str,
        stream: bool = False,
        raw: bool = False,
        # PEP 484: a default of None requires an explicit Optional annotation.
        optimizer: Optional[str] = None,
        conversationally: bool = False,
    ) -> dict:
        """Asynchronously chat with AI.

        Args:
            prompt (str): Prompt to be sent.
            stream (bool, optional): Flag for streaming response. Defaults to False.
            raw (bool, optional): Stream back raw response as received. Defaults to False.
            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.

        Returns:
            dict: Provider response payload, e.g.

            ```json
            {
                "completion": "\nNext: domestic cat breeds with short hair >>",
                "stop_reason": null,
                "truncated": false,
                "stop": null,
                "model": "llama-2-13b-chat",
                "log_id": "cmpl-3kYiYxSNDvgMShSzFooz6t",
                "exception": null
            }
            ```
        """
        raise NotImplementedError("Method needs to be implemented in subclass")

    @abstractmethod
    async def chat(
        self,
        prompt: str,
        stream: bool = False,
        optimizer: Optional[str] = None,
        conversationally: bool = False,
    ) -> str:
        """Asynchronously generate response `str`.

        Args:
            prompt (str): Prompt to be sent.
            stream (bool, optional): Flag for streaming response. Defaults to False.
            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.

        Returns:
            str: Response generated.
        """
        raise NotImplementedError("Method needs to be implemented in subclass")

    @abstractmethod
    async def get_message(self, response: dict) -> str:
        """Asynchronously retrieve message only from a response payload.

        Args:
            response (dict): Response generated by `self.ask`.

        Returns:
            str: Message extracted.
        """
        raise NotImplementedError("Method needs to be implemented in subclass")