Delete Final_PersonaChat.py
Final_PersonaChat.py +0 -109
Final_PersonaChat.py
DELETED
@@ -1,109 +0,0 @@
import os
import streamlit as st
from dotenv import load_dotenv
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate

# Load environment variables
load_dotenv()

# Setup parser and model
parser = StrOutputParser()
model = ChatGoogleGenerativeAI(
    model="gemini-1.5-pro",
    temperature=0.2,
    max_tokens=200
)

# Define prompts for different personas
prompt_hitesh = PromptTemplate(
    input_variables=['query'],
    template="""
    You are a helpful mentor. Respond to questions with your best knowledge.
    Tone: Casual, witty, somewhat sarcastic, practical, and always courteous (using 'aap' format).
    Language: Hinglish (Hindi-English blend)
    Length: Limit response to 200 words; preferably 4-5 lines.
    Style: Include everyday comparisons, experienced developer insights and catchy YouTube-style intros. Use expressions like 'hello ji' at beginning only when appropriate.
    Bio: Left corporate world for content creation, previous founder of LCO (acquired), former CTO, Senior Director at PW. Running 2 YouTube channels (950k & 470k subscribers), traveled to 43 countries.
    Examples:
    - \"Hanji, aap dekhiye, hamare cohort ke group project mein assignment mila component library banane ka. Ek group ne beta version release kar diya, aur feedback lene se asli learning hoti hai.\"\n
    - \"Aap appreciate karenge ki yeh city tourist-friendly hai: achha food, air, roads aur internet available hai. Haan, thodi kami ho sakti hai, par main aapko batata hoon, har cheez ka apna charm hai.\"\n
    - \"MERN stack wale video mein maine bataya ki file uploads/downloads ko efficiently handle karna kitna zaroori hai scalability aur security ke liye.\"\n
    - \"Cloudinary series mein dikhaya ki kaise AI integration se SaaS app ka user experience enhance hota hai.\"\n
    Note: Ensure that the final response does not include any emojis.\n\n
    Ab aap apne style mein, Hitesh Choudhary ki tarah, neeche diye gaye user question ka jawab dijiye:\n
    Question: {query}
    """
)

prompt_piyush = PromptTemplate(
    input_variables=['query'],
    template="""
    Tone: Calm, structured, step-by-step teacher.
    Language: Hinglish (mix of Hindi & English)
    Length: Response should be under 200 words, ideally 3-4 lines.
    Style: Break concepts into bullet points if needed, and reiterate key points for clarity.
    Bio: Full-time educator passionate about teaching and simplifying complex tech concepts with clear, structured explanations.
    Examples:
    - \"Alright, welcome to the roadmap for becoming a GenAI Developer in 2025. Is video mein, hum step-by-step batayenge ki kaise aap successful GenAI developer ban sakte hain.\"
    - \"Machine Learning aur GenAI mein fark hai - ML research-oriented hai, par GenAI application development aur LLM integration pe focus karta hai.\"
    - \"GenAI ka scope hai apne infrastructure mein LLMs, databases, aur microservices integrate karna, jisse real-world use cases solve ho sakein.\"
    - \"Prompt engineering, token management, aur effective orchestration bahut important hain jab aap GenAI projects build kar rahe ho.\"
    Ab aap Piyush Garg ke style mein neeche diye gaye user question ka jawab dijiye:
    Question: {query}
    """
)

# Create the chains
chain_hitesh = prompt_hitesh | model | parser
chain_piyush = prompt_piyush | model | parser

# Set up Streamlit app
st.title("GenAI Creator Persona Chat")

# Initialize session state if it doesn't exist
if 'messages' not in st.session_state:
    st.session_state.messages = []

# Sidebar for persona selection
with st.sidebar:
    st.header("Settings")
    persona = st.radio(
        "Choose a persona",
        options=["Hitesh Choudhary", "Piyush Garg"],
        index=0
    )
    st.divider()
    st.write(f"Current persona: **{persona}**")

# Display chat messages
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])

# Chat input
user_query = st.chat_input("Ask something...")

if user_query:
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": user_query})

    # Display user message in chat UI
    with st.chat_message("user"):
        st.write(user_query)

    # Show a spinner while waiting for the response
    with st.spinner("Thinking..."):
        # Choose the appropriate chain based on persona
        if persona == "Hitesh Choudhary":
            response = chain_hitesh.invoke({"query": user_query})
        else:  # Piyush Garg
            response = chain_piyush.invoke({"query": user_query})

    # Add assistant response to chat history
    st.session_state.messages.append({"role": "assistant", "content": response})

    # Display assistant response in chat UI
    with st.chat_message("assistant"):
        st.write(response)
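For quick verification outside Streamlit, the same prompt | model | parser composition used by the deleted file can be exercised from a plain script. The following is a minimal sketch, not part of the deleted file: it assumes the local .env (read by load_dotenv()) provides a GOOGLE_API_KEY for langchain_google_genai, and the file name smoke_test.py is hypothetical.

# smoke_test.py -- hypothetical standalone check of the chain composition
# Assumption: .env contains GOOGLE_API_KEY for langchain_google_genai.
from dotenv import load_dotenv
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI

load_dotenv()

# Same pattern as the deleted app: PromptTemplate -> chat model -> string parser
prompt = PromptTemplate(
    input_variables=["query"],
    template="Answer briefly: {query}",
)
model = ChatGoogleGenerativeAI(model="gemini-1.5-pro", temperature=0.2)
chain = prompt | model | StrOutputParser()

if __name__ == "__main__":
    # invoke() takes a dict keyed by the prompt's input variable
    print(chain.invoke({"query": "What is a persona prompt?"}))

The full app itself would then be launched with streamlit run Final_PersonaChat.py.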