Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -1,37 +1,50 @@
 import json
+import os
 from flask import Flask, request, jsonify
 from transformers import pipeline
 
-# Load
-llm = pipeline("text-generation", model="
+# Load a public model that doesn't require authentication
+llm = pipeline("text-generation", model="tiiuae/falcon-7b-instruct")
 
-# Load the schemes data from JSON
-
-
+# Load the schemes data from JSON if available
+SCHEMES_FILE = "schemes.json"
+
+if os.path.exists(SCHEMES_FILE):
+    with open(SCHEMES_FILE, "r", encoding="utf-8") as f:
+        schemes_data = json.load(f)
+else:
+    schemes_data = {"schemes": []}  # Default to empty if file is missing
 
 app = Flask(__name__)
 
-# Function to search for relevant information
+# Function to search for relevant scheme information
 def find_scheme_info(query):
-    for scheme in schemes_data
+    for scheme in schemes_data.get("schemes", []):
         if query.lower() in scheme["name"].lower() or query.lower() in scheme["description"].lower():
             return scheme["description"]
-    return
+    return None  # Return None instead of a default message
 
-# Function to generate
+# Function to generate chatbot response
 def chatbot_response(query):
     scheme_info = find_scheme_info(query)
-
-
-
+
+    if scheme_info:
+        return scheme_info  # Return scheme info if found
+
+    try:
         response = llm(query, max_length=200, do_sample=True)
         return response[0]['generated_text']
+    except Exception as e:
+        return f"Error generating response: {str(e)}"
 
+# API route for chatbot
 @app.route("/chat", methods=["POST"])
 def chat():
-    user_input = request.json.get("query", "")
+    user_input = request.json.get("query", "").strip()
+
     if not user_input:
         return jsonify({"error": "No query provided"}), 400
+
     response = chatbot_response(user_input)
     return jsonify({"response": response})
 
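For reference, the updated code expects schemes.json to hold an object with a top-level "schemes" list whose entries carry "name" and "description" keys, and it exposes a single POST /chat route that takes a JSON body with a "query" field. Below is a minimal client sketch for exercising that route; the base URL and port are assumptions (adjust them to wherever this Space's Flask server is actually reachable), not part of the commit.

import requests

BASE_URL = "http://localhost:7860"  # assumed host/port; change to the Space's actual address

# Send a query to the /chat route and print the chatbot's reply.
resp = requests.post(
    f"{BASE_URL}/chat",
    json={"query": "Tell me about scholarship schemes"},
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["response"])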