# Streamlit chat app that streams responses from a local Ollama deepseek-r1 model.
# Standard library first, then third-party, per PEP 8 import grouping.
import asyncio

import streamlit as st
from ollama import AsyncClient
async def chat(prompt):
    """Stream chat-completion text chunks for *prompt* from the local model.

    Yields each text fragment ('' pieces included) as the model produces it.
    Raises whatever ``ollama.AsyncClient.chat`` raises if the Ollama server
    is unreachable or the model is not pulled.
    """
    message = {'role': 'user', 'content': prompt}
    # stream=True makes AsyncClient.chat return an async iterator of
    # partial responses; awaiting the call yields that iterator.
    async for part in await AsyncClient().chat(
        model='deepseek-r1:1.5b',
        messages=[message],
        stream=True,
    ):
        yield part['message']['content']
async def get_full_response(prompt):
    """Drain the ``chat`` stream for *prompt* and return the joined text.

    Returns the complete model response as a single string ('' if the
    stream yields nothing).
    """
    # Accumulate fragments in a list and join once — avoids the
    # potentially quadratic cost of repeated str += in the loop.
    parts = []
    async for fragment in chat(prompt):
        parts.append(fragment)
    return "".join(parts)
def main():
    """Render the Streamlit UI and run one chat round per button press."""
    st.title("Deepseek-R1 Chat")
    prompt = st.text_input("Enter your prompt:")
    # Streamlit reruns the whole script on each interaction; the button
    # is only True on the rerun triggered by clicking it.
    if st.button("Generate"):
        if prompt:
            with st.spinner("Generating response..."):
                # asyncio.run spins up a fresh event loop per request,
                # which fits Streamlit's rerun-per-interaction model.
                full_response = asyncio.run(get_full_response(prompt))
                st.write(full_response)
# Script entry point — run the UI only when executed directly.
if __name__ == '__main__':
    main()