# Streamlit chat app using g4f (originally hosted as a Hugging Face Space).
import streamlit as st
import requests
from bs4 import BeautifulSoup
import g4f

# ---- Page setup ----
st.title("Chat with GPT-4 Free")

# Persist the conversation across Streamlit reruns (each interaction
# re-executes the whole script, so history must live in session_state).
if "messages" not in st.session_state:
    st.session_state.messages = []

# Sidebar: map human-readable labels to g4f model objects.
model_options = {
    "GPT-3.5 Turbo": g4f.models.gpt_3_5_turbo,
    "GPT-4": g4f.models.gpt_4,
    "GPT-4o": g4f.models.gpt_4o,
}
selected_model = st.sidebar.selectbox("Select Model", list(model_options.keys()))

# Sidebar: when enabled, prompts are answered with Google search results
# instead of being sent to the language model (see generate_response).
search_enabled = st.sidebar.checkbox("Enable Internet Search")
# Function to perform a Google search
def google_search(query):
    """Scrape the first page of Google results for *query*.

    Returns a list of ``{'title': ..., 'summary': ...}`` dicts; an empty
    list when the request fails or nothing could be parsed.

    NOTE(review): scraping Google's HTML is brittle — the ``g`` /
    ``aCOpRe`` class names change without notice; a proper search API
    would be more reliable.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'
    }
    try:
        # `params` URL-encodes the query (the original f-string broke on
        # spaces and special characters); a timeout keeps a slow or
        # unreachable Google from hanging the whole Streamlit app.
        response = requests.get(
            "https://www.google.com/search",
            params={"q": query},
            headers=headers,
            timeout=10,
        )
        response.raise_for_status()
    except requests.RequestException:
        # Best effort: the caller renders "No results found." for [].
        return []

    # Parse the HTML and pull title/summary pairs out of each result card.
    soup = BeautifulSoup(response.text, 'html.parser')
    search_results = []
    for result in soup.find_all('div', class_='g'):
        title = result.find('h3')
        summary = result.find('span', class_='aCOpRe')
        if title and summary:
            search_results.append({
                'title': title.text,
                'summary': summary.text,
            })
    return search_results
# Function to generate responses from g4f
def generate_response(prompt, model, search=False):
    """Produce the assistant's reply for *prompt* as a plain string.

    When *search* is True, answer with formatted Google search results
    instead of querying the model.  Otherwise ask g4f's *model* and join
    the streamed chunks into one string.
    """
    if search:
        # Perform a web search and return the formatted results.
        search_results = google_search(prompt)
        formatted_results = "\n".join(
            f"{result['title']}: {result['summary']}" for result in search_results
        )
        return f"Search Results:\n{formatted_results}" if formatted_results else "No results found."

    response = g4f.ChatCompletion.create(
        model=model,
        messages=[{"role": "user", "content": prompt}],
        stream=True,  # g4f yields chunks as they arrive
    )
    # BUG FIX: with stream=True g4f returns a generator, but the caller
    # only renders string responses — the model's answer was silently
    # dropped.  Collapse the stream into a single string here.
    return "".join(str(chunk) for chunk in response)
# Replay the stored conversation on every Streamlit rerun.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Accept user input
if prompt := st.chat_input("Type your message here..."):
    # Echo the user's message and record it in the history.
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Generate and display the assistant's reply.
    with st.chat_message("assistant"):
        response = generate_response(prompt, model_options[selected_model], search=search_enabled)
        if not isinstance(response, str):
            # BUG FIX: streamed g4f responses are generators; the original
            # code displayed and stored nothing for them.  Collapse the
            # chunks into one string so every reply is shown and saved.
            response = "".join(str(chunk) for chunk in response)
        st.markdown(response)
        # Add the assistant's response to the chat history.
        st.session_state.messages.append({"role": "assistant", "content": response})