Eric Botti committed
Commit · dd5d973 · 1 Parent(s): 92e6f44
added user input to app (non-functional)

Browse files
- src/app.py +35 -11
- src/game.py +1 -1
- src/player.py +6 -6
src/app.py CHANGED

@@ -18,7 +18,22 @@ def display_message(message):
     elif message["type"] == "debug":
         messages_container.markdown(f":orange[DEBUG: {message['content']}]")

+
 class StreamlitGame(Game):
+    @staticmethod
+    async def human_input(prompt: str) -> str:
+        _user_input = st.chat_input("Your message", key=f"user_input_{st.session_state.user_input_id}")
+        st.session_state.user_input_id += 1
+
+        while _user_input is None or _user_input == "":
+            sleep(0.1)
+
+        print(f"User input: {_user_input}")
+
+        response = AIMessage(content=_user_input)
+
+        return response
+
     def human_message(self, message: str):
         message = {"type": "game", "content": message}
         st.session_state["messages"].append(message)

@@ -39,8 +54,10 @@ class StreamlitGame(Game):

 if "messages" not in st.session_state:
     st.session_state["messages"] = []
-if "
-    st.session_state["
+if "game_started" not in st.session_state:
+    st.session_state["game_started"] = False
+if "user_input_id" not in st.session_state:
+    st.session_state["user_input_id"] = 0

 margin_size = 1
 center_size = 3

@@ -53,17 +70,24 @@ with title_center:
 left, center, right = st.columns([margin_size, center_size, margin_size])

 with center:
-    st.
+    messages_container = st.container()
+
+    messages_container.write("Welcome to Chameleon! A social deduction game powered by LLMs.")
+
+    messages_container.write("Enter your name to begin...")
+
+    if st.session_state.messages:
+        for message in st.session_state.messages:
+            display_message(message)

-
+    user_input = st.chat_input("Your message")
+    st.session_state.user_input_id += 1

-
-    st.
-
-    st.
+    if not st.session_state.game_started and user_input:
+        st.session_state.game_started = True
+        if "game" not in st.session_state:
+            st.session_state.game = StreamlitGame(human_name=user_input, verbose=True)

+    asyncio.run(st.session_state.game.start())

-    messages_container = center.container()

-    if start_button:
-        asyncio.run(st.session_state["game"].start())
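Note on the new StreamlitGame.human_input: it polls st.chat_input inside a while/sleep(0.1) loop, but Streamlit only delivers a chat submission on the script rerun that the submission itself triggers, so a value that is None at the top of the loop stays None forever and the loop never exits; the closing asyncio.run(st.session_state.game.start()) then blocks that rerun until the game ends, which is presumably why the commit is labelled non-functional. The hunks also do not show imports for sleep or AIMessage, so those must already exist elsewhere in src/app.py. Below is a minimal sketch of the usual rerun-friendly alternative, assuming a hypothetical pending_input session key; it is not the author's fix.

# Hedged sketch, not from this commit: hand chat input across Streamlit reruns
# via st.session_state instead of busy-waiting inside one script run.
import streamlit as st

if "pending_input" not in st.session_state:   # hypothetical key for this sketch
    st.session_state.pending_input = None

submitted = st.chat_input("Your message")
if submitted:
    # st.chat_input returns the text only on the rerun triggered by pressing
    # enter; stash it so later code (or the next rerun) can consume it.
    st.session_state.pending_input = submitted

if st.session_state.pending_input:
    st.write(f"You said: {st.session_state.pending_input}")

Driving an async Game.start() from a script that reruns top to bottom would likely mean advancing the game one awaited turn per rerun rather than running the whole game inside a single asyncio.run call.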
src/game.py CHANGED

@@ -131,7 +131,7 @@ class Game:
     # The following methods are used to broadcast messages to a human.
     # They are design so that they can be overridden by a subclass for a different player interface.
     @staticmethod
-    def human_input(prompt: str) -> AIMessage:
+    async def human_input(prompt: str) -> AIMessage:
         """Gets input from the human player."""
         response = AIMessage(content=input())
         return response
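Note on Game.human_input: the base method is now a coroutine, but its body still calls the blocking built-in input(), which would freeze the event loop for every other coroutine while it waits. Below is a minimal sketch of a non-blocking console variant, assuming Python 3.9+ for asyncio.to_thread and langchain_core.messages for AIMessage (the project's actual import path is not shown in this hunk).

# Hedged sketch, not from this commit: run the blocking input() call in a
# worker thread so the event loop stays free while waiting for the human.
import asyncio

from langchain_core.messages import AIMessage  # assumed import path


async def human_input(prompt: str) -> AIMessage:
    """Gets input from the human player without blocking the event loop."""
    text = await asyncio.to_thread(input, prompt)
    return AIMessage(content=text)


if __name__ == "__main__":
    print(asyncio.run(human_input("Your name: ")).content)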
src/player.py CHANGED

@@ -138,22 +138,22 @@ class Player:
     def is_ai(self):
         return not self.is_human()

-    def _generate(self, message: Message):
+    async def _generate(self, message: Message):
         """Entry point for the Runnable generating responses, automatically logs the message."""
         self.add_to_history(message)

         # AI's need to be fed the whole message history, but humans can just go back and look at it
         if self.controller_type == "human":
-            response = self.controller.invoke(message.content)
+            response = await self.controller.ainvoke(message.content)
         else:
             formatted_messages = [(message.langchain_type, message.content) for message in self.messages]
-            response = self.controller.invoke(formatted_messages)
+            response = await self.controller.ainvoke(formatted_messages)

         self.add_to_history(Message(type="player", content=response.content))

         return response

-    def _output_formatter(self, inputs: dict):
+    async def _output_formatter(self, inputs: dict):
         """Formats the output of the response."""
         output_format: BaseModel = inputs["output_format"]


@@ -167,6 +167,6 @@ class Player:

        message = Message(type="player", content=prompt.text)

-        response = self.generate.invoke(message)
+        response = await self.generate.ainvoke(message)

-        return parser.invoke(response)
+        return await parser.ainvoke(response)
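Note on the src/player.py changes: _generate and _output_formatter switch from LangChain's blocking invoke to the awaitable ainvoke, which only helps if every caller up the stack (the parsing chain, the game loop, the Streamlit entry point) awaits them as well. Below is a minimal sketch of that sync/async pair, using a RunnableLambda as a stand-in for the project's controller, whose construction is not shown in this diff.

# Hedged sketch, not from this commit: the invoke/ainvoke pair on a LangChain
# Runnable. RunnableLambda stands in for the project's real controller.
import asyncio

from langchain_core.runnables import RunnableLambda

controller = RunnableLambda(lambda text: f"echo: {text}")  # hypothetical controller


def generate_sync(text: str) -> str:
    return controller.invoke(text)            # old, blocking style


async def generate_async(text: str) -> str:
    return await controller.ainvoke(text)     # new style; the caller must await it


print(generate_sync("hello"))
print(asyncio.run(generate_async("hello")))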