import streamlit as st
from huggingface_hub import login, InferenceClient
import os
import torch

torch.manual_seed(0)
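
# NOTE: this app expects a Hugging Face access token in the HF_TOKEN environment
# variable (on a Space, typically set as a repository secret). Assumed
# dependencies, not pinned in the original: streamlit, huggingface_hub, torch.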

# Log in to the Hugging Face Hub with the token stored in HF_TOKEN
def hf_login():
    api_key = os.getenv("HF_TOKEN")
    try:
        login(token=api_key)
    except Exception as e:
        st.markdown(f"An error occurred: {e}")

hf_login()

# Model configuration: DeepSeek-R1 served through the Together inference provider
model_name = "deepseek-ai/DeepSeek-R1"
provider = "together"

base_prompt = '''
You are an AI that flirts with people. You should flirt with whomever talks to you. Here are the rules:
- Do not use coarse words.
- Do not be sexist or racist.
- Do not sexually harass people who talk to you.
'''

messages = [
    {"role": "system", "content": base_prompt},
]
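
# Create the InferenceClient used to call the hosted model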
def load_client():
    try:
        client = InferenceClient(
            provider=provider,
            api_key=os.getenv("HF_TOKEN")
        )
        return client
    except Exception as e:
        st.error(f"An error occurred: {e}")
        return None

client = load_client()
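
# Send the running conversation to the model and return the assistant's reply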
def generate_response(messages: list):
    try:
        completion = client.chat.completions.create(
            model=model_name,
            messages=messages,
            max_tokens=512,
        )
        response = completion.choices[0].message.content
        return response
    except Exception as e:
        st.error(f"An error occurred: {e}")
        return "I'm having a little trouble right now. Please try again later."

## Streamlit Start :)
st.title('Romantic AI Partner 💖')
st.markdown(""" | |
<style> | |
/* Custom audio player styling */ | |
.stAudio { | |
border-radius: 15px; | |
box-shadow: 0 4px 6px rgba(255, 77, 109, 0.1); | |
margin: 1.5rem 0; | |
overflow: hidden; | |
} | |
/* Controls container */ | |
.stAudio > div { | |
background: #ffccd5 !important; | |
padding: 12px !important; | |
border-radius: 15px !important; | |
} | |
/* Play/pause button */ | |
.stAudio button { | |
background-color: #ff4d6d !important; | |
border-radius: 50% !important; | |
width: 36px !important; | |
height: 36px !important; | |
transition: all 0.3s ease !important; | |
} | |
.stAudio button:hover { | |
background-color: #c9184a !important; | |
transform: scale(1.1) !important; | |
} | |
/* Progress bar */ | |
.stAudio input[type="range"] { | |
accent-color: #ff4d6d !important; | |
height: 4px !important; | |
} | |
/* Time display */ | |
.stAudio > div > div:first-child { | |
color: #2d0005 !important; | |
font-family: 'Georgia', serif !important; | |
font-size: 0.9rem !important; | |
} | |
/* Volume controls */ | |
.stAudio > div > div:last-child { | |
display: flex; | |
align-items: center; | |
gap: 8px; | |
} | |
/* Volume slider */ | |
.stAudio input[type="range"]::-webkit-slider-thumb { | |
background: #ff4d6d !important; | |
border: 2px solid #fff0f3 !important; | |
width: 14px !important; | |
height: 14px !important; | |
} | |
/* Mute button */ | |
.stAudio button[title="Mute"] { | |
background-color: transparent !important; | |
color: #ff4d6d !important; | |
border: 2px solid #ff4d6d !important; | |
} | |
.stAudio button[title="Mute"]:hover { | |
background-color: #ff4d6d !important; | |
color: #fff0f3 !important; | |
} | |
</style> | |
""", unsafe_allow_html=True) | |
st.audio("little_one.mp3", format="audio/mpeg", loop=True)
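
# Keep the full conversation in st.session_state and replay it on each rerun,
# hiding the system prompt from the chat display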
if "messages" not in st.session_state: | |
st.session_state.messages = [{"role": "system", "content": base_prompt}] | |
for message in st.session_state.messages[1:]: | |
with st.chat_message(message["role"]): | |
st.markdown(message["content"]) | |
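
# Handle a new user message: echo it, query the model, and store the reply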
if prompt := st.chat_input("How's it going ;)"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
    with st.chat_message("assistant"):
        with st.spinner('Thinking of you 💭'):
            response = generate_response(st.session_state.messages)
            st.markdown(response)
    st.session_state.messages.append({"role": "assistant", "content": response})
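
# Sidebar: conversation controls and model details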
with st.sidebar:
    st.header("Settings ⚙️")
    if st.button("**Clear History** 🥰"):
        st.session_state.messages = [{"role": "system", "content": base_prompt}]
        st.rerun()
    st.markdown("<br><br><br>", unsafe_allow_html=True)

    st.header("Model Settings 🤖")
    with st.expander("**Model**"):
        st.markdown(model_name)
    st.markdown("<br>", unsafe_allow_html=True)
    with st.expander('**Base Prompt**'):
        st.markdown(base_prompt)