Serg4451D committed
Commit 29850df · verified · 1 Parent(s): 7ea8dbb

Update app.py

Files changed (1)
  1. app.py +28 -37
app.py CHANGED
@@ -1,52 +1,43 @@
  import streamlit as st
- import g4f
+ from g4f import ChatCompletion

- # Set the title of the app
- st.title("Chat with GPT-4 Free")
+ # List of available models
+ models = [
+     "gpt-4o", "gpt-4o-mini", "gpt-4",
+     "gpt-4-turbo", "gpt-3.5-turbo",
+     "claude-3.5-sonnet", "claude-3-opus", "claude-3-haiku"
+ ]

- # Initialize chat history in session state
+ # Initialize chat history
  if "messages" not in st.session_state:
      st.session_state.messages = []

- # Model selection
- model_options = {
-     "GPT-4 Optimized": g4f.models.gpt_4o,
-     "GPT-4": g4f.models.gpt_4,
-     "GPT-4 Turbo": g4f.models.gpt_4_turbo,
-     "GPT-4 Optimized Mini": g4f.models.gpt_4o_mini
- }
-
- selected_model = st.selectbox("Choose a model:", list(model_options.keys()))
+ # Streamlit app title
+ st.title("Chat with AI Models")

- # Function to generate responses from g4f
- def generate_response(prompt):
-     response = g4f.ChatCompletion.create(
-         model=model_options[selected_model],
-         messages=[{"role": "user", "content": prompt}],
-         stream=True  # Enable streaming for real-time response
-     )
-     return response
+ # Model selection
+ selected_model = st.selectbox("Choose a model:", models)

  # Display chat messages from history
  for message in st.session_state.messages:
      with st.chat_message(message["role"]):
          st.markdown(message["content"])

- # Accept user input
- if prompt := st.chat_input("Type your message here..."):
-     # Display user message in chat message container
-     with st.chat_message("user"):
-         st.markdown(prompt)
-
-     # Add user message to chat history
-     st.session_state.messages.append({"role": "user", "content": prompt})
+ # User input
+ if user_input := st.chat_input("What do you want to ask?"):
+     # Display user message
+     st.chat_message("user").markdown(user_input)
+     st.session_state.messages.append({"role": "user", "content": user_input})

-     # Generate and display AI response
+     # Get response from selected model
+     response = ChatCompletion.create(
+         model=selected_model,
+         messages=st.session_state.messages
+     )
+
+     # Display assistant response
      with st.chat_message("assistant"):
-         response = generate_response(prompt)
-         for msg in response:
-             if isinstance(msg, str):  # Check if the message is a string
-                 st.markdown(msg)
-
-         # Add assistant's response to chat history
-         st.session_state.messages.append({"role": "assistant", "content": msg})
+         st.markdown(response['choices'][0]['message']['content'])
+
+     # Add assistant response to chat history
+     st.session_state.messages.append({"role": "assistant", "content": response['choices'][0]['message']['content']})