sivapriya14 committed (verified)
Commit 7123e1b · 1 Parent(s): 51d6136

Update app.py

Files changed (1)
  1. app.py +31 -21
app.py CHANGED
@@ -1,29 +1,39 @@
-import gradio as gr
 import json
+from flask import Flask, request, jsonify
 from transformers import pipeline
 
-# Load JSON data
-with open("schemes.json", "r") as file:
-    scheme_data = json.load(file)
+# Load the model (use a public model)
+llm = pipeline("text-generation", model="mistralai/Mistral-7B-v0.1")
 
-# Function to search relevant scheme information
-def search_scheme(query):
-    for scheme in scheme_data:
+# Load the schemes data from JSON
+with open("schemes.json", "r", encoding="utf-8") as f:
+    schemes_data = json.load(f)
+
+app = Flask(__name__)
+
+# Function to search for relevant information
+def find_scheme_info(query):
+    for scheme in schemes_data["schemes"]:
         if query.lower() in scheme["name"].lower() or query.lower() in scheme["description"].lower():
-            return f"**Scheme Name:** {scheme['name']}\n\n**Description:** {scheme['description']}\n\n**Eligibility:** {scheme['eligibility']}\n\n**Benefits:** {scheme['benefits']}"
-    return "Sorry, I couldn't find details on that scheme."
+            return scheme["description"]
+    return "Sorry, I couldn't find information on that scheme."
 
-# Load LLM model
-llm = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct")
+# Function to generate a chatbot response
+def chatbot_response(query):
+    scheme_info = find_scheme_info(query)
+    if scheme_info != "Sorry, I couldn't find information on that scheme.":
+        return scheme_info
+    else:
+        response = llm(query, max_length=200, do_sample=True)
+        return response[0]['generated_text']
 
-# Chatbot function
-def chatbot(query):
-    relevant_info = search_scheme(query)
-    if "Sorry" in relevant_info:
-        return relevant_info
-    response = llm(f"Provide a concise summary of the following government scheme:\n{relevant_info}", max_length=200)
-    return response[0]["generated_text"]
+@app.route("/chat", methods=["POST"])
+def chat():
+    user_input = request.json.get("query", "")
+    if not user_input:
+        return jsonify({"error": "No query provided"}), 400
+    response = chatbot_response(user_input)
+    return jsonify({"response": response})
 
-# Gradio Interface
-demo = gr.Interface(fn=chatbot, inputs="text", outputs="text", title="Govt Scheme Chatbot", description="Ask about any government scheme!")
-demo.launch()
+if __name__ == "__main__":
+    app.run(host="0.0.0.0", port=7860, debug=True)
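A minimal client-side sketch of how the new /chat endpoint could be exercised once this version of app.py is running. The /chat route, the {"query": ...} request payload, the {"response": ...} / {"error": ...} reply shapes, and port 7860 come from the diff above; the requests library, the localhost URL, the sample query text, and the example schemes.json layout are assumptions for illustration only.

# Sketch, not part of the commit: assumes the Flask app above is running locally
# and that schemes.json looks roughly like
#   {"schemes": [{"name": "PM-KISAN", "description": "Income support for farmers."}]}
# (the new code only requires a top-level "schemes" list whose items have
#  "name" and "description" keys).
import requests  # assumed client-side dependency

resp = requests.post(
    "http://localhost:7860/chat",  # port taken from app.run(..., port=7860)
    json={"query": "PM-KISAN"},    # hypothetical query; matched against name/description
    timeout=120,                   # generous timeout: text-generation can be slow
)
print(resp.status_code, resp.json())  # 200 with {"response": ...}, or 400 with {"error": ...}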