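# Gradio chat assistant for Jisu's personal Space: user questions are routed by an
# LLM-based matcher to either a curated question set (QUESTION_JSON) or the full
# CV (CV_JSON), and answered with OpenAI's gpt-3.5-turbo via LangChain.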
from langchain.chat_models import ChatOpenAI
from langchain.schema import AIMessage, HumanMessage
import openai
import gradio as gr
import os

os.environ["OPENAI_API_KEY"] = os.getenv('OPENAI_API_KEY')

llm = ChatOpenAI(temperature=1.0, model='gpt-3.5-turbo-0613')

question_json = os.getenv('QUESTION_JSON')
cv_json = os.getenv('CV_JSON')
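
# Chat handler for gr.ChatInterface. If the user's message matches one of the
# known CV questions, answer from QUESTION_JSON using the PREDICT_PROMPT template;
# otherwise answer from the full CV_JSON with an inline prompt.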
def predict(message, history):
    if find(message):
        # PREDICT_PROMPT is a prompt template stored as an environment variable;
        # it is formatted with the known-question data and the user's message.
        prompt = os.getenv('PREDICT_PROMPT').format(question_json, message)
        response = llm([HumanMessage(content=prompt)])
        return response.content
    prompt = (
        f"As Jisu (she/her/hers)'s personal assistant, "
        f"given that: {cv_json}, how can I assist with information on: {message}? "
        f"If the source does not contain relevant information, I will state that the information is not available."
    )
    response = llm([HumanMessage(content=prompt)])
    return response.content
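
# LLM-based router: asks the model whether the query matches one of the listed
# CV topics and returns True only on an exact 'Yes' reply.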
def find(message):
    prompt = (
        "Given the list of questions about Jisu's CV:\n"
        "- What are Jisu's current projects?\n"
        "- What are Jisu's publications?\n"
        "- How can I reach out to Jisu?\n"
        f"Determine if the following query matches any of the topics above: '{message}'. Answer 'Yes' if it matches, otherwise answer 'No'."
    )
    response = llm([HumanMessage(content=prompt)])
    return response.content.strip() == 'Yes'
# Example inputs as buttons
examples = [
    "What are Jisu's current projects?",
    "What are Jisu's publications?",
    "How can I reach out to Jisu?",
    # "How is the answer generated?"
]
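
# Page layout: profile/bio Markdown followed by a chat interface wired to predict().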
with gr.Blocks(theme='gradio/soft', fill_height=True) as demo:
    gr.Markdown(
        """
<img src="http://jisulog.kim/profile.png" alt="Profile Image" style="width: 200px; height: auto; border-radius: 50%;">
# 👋 Hi, I am Jisu Kim!
I am an MS candidate in **Interactive Computing** at 🐝 **Georgia Tech**. I am advised by [**Ashok Goel**](https://dilab.gatech.edu/ashok-k-goel/) and [**Richmond Wong**](https://richmondywong.com/), and was previously advised by [**Juho Kim**](https://juhokim.com/) at 🪿 **KAIST**.
My research interests lie at the intersection of artificial intelligence (AI) and human-computer interaction (HCI). I am focused on enhancing both productivity tools and creativity support tools in learning environments. My goal is to develop AI technologies that enrich the learning experience and enhance human-AI interaction. Driven by my passion for creating human-centric AI technologies, I am applying to Ph.D. programs for Fall 2025!
[**LinkedIn**](https://www.linkedin.com/in/jisulog/) | [**Twitter**](https://x.com/jisukiim?s=21) | [**CV**](https://ddiddu.github.io/JisuKim_CV.pdf) | [**YouTube**](https://youtu.be/btZOScj22jE?si=0zz5y61KNLsBJXcm)
---
# 🤖 Hi, I am Jisu's personal assistant!
**Ask about Jisu and I will provide the information as far as I know.**
I am currently under development. If you notice errors or have suggestions for improvement, feel free to share them with Jisu! Don't know how to reach out to Jisu? **Ask me!**
""") | |
gr.ChatInterface(predict, examples=examples) | |
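
# share=True requests a temporary public Gradio link when the script runs locally;
# on Hugging Face Spaces the app is already served publicly and the flag is effectively ignored.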
if __name__ == "__main__":
    demo.launch(share=True)
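
# Local usage sketch (assumes the OPENAI_API_KEY, QUESTION_JSON, CV_JSON, and
# PREDICT_PROMPT environment variables are set, e.g. mirroring the Space's secrets):
#   pip install gradio langchain openai
#   python app.py   # app.py is the usual Space entry point; adjust if this file is named differently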