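"""Flask recommendation API.

Wraps a Mistral-7B-Instruct model (via the Hugging Face Inference API) behind
endpoints for mentor chat, course and stream recommendations, mentor matching,
and education-profile listings. Verified mentors are read from a PostgreSQL
database.
"""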
from flask import Flask, request, jsonify
from flask_cors import CORS
from huggingface_hub import InferenceClient
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# NOTE: the Mentor ORM model is assumed to be defined in the project's models module.
from models import Mentor

# Hugging Face Inference client for the Mistral-7B-Instruct model
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.1")

# PostgreSQL (Neon) connection used to look up verified mentors
connection_string = "postgresql://data_owner:PFAnX9oJp4wV@ep-green-heart-a78sxj65.ap-southeast-2.aws.neon.tech/figurecircle?sslmode=require"
engine = create_engine(connection_string)
Session = sessionmaker(bind=engine)

app = Flask(__name__)
CORS(app)
@app.route('/')
def home():
    return jsonify({"message": "Welcome to the Recommendation API!"})
def format_prompt(message):
    # Wrap the message in the Mistral instruct format, preceded by a fixed
    # placeholder user/assistant exchange.
    user_prompt = "UserPrompt"
    bot_response = "BotResponse"
    return f"<s>[INST] {user_prompt} [/INST] {bot_response}</s> [INST] {message} [/INST]"
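# Example: format_prompt("Hello") returns
# '<s>[INST] UserPrompt [/INST] BotResponse</s> [INST] Hello [/INST]'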
@app.route('/ai_mentor', methods=['POST'])
def ai_mentor():
    data = request.get_json()
    message = data.get('message')
    if not message:
        return jsonify({"message": "Missing message"}), 400

    # Sampling parameters for text generation
    temperature = 0.9
    max_new_tokens = 256
    top_p = 0.95
    repetition_penalty = 1.0
    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )

    # Define prompt for the conversation
    prompt = f""" prompt:
    Act as a mentor
    User: {message}"""
    formatted_prompt = format_prompt(prompt)

    try:
        # Generate response from the language model
        response = client.text_generation(formatted_prompt, **generate_kwargs, stream=False, details=False, return_full_text=False)
        return jsonify({"response": response}), 200
    except Exception as e:
        return jsonify({"message": f"Failed to process request: {str(e)}"}), 500
@app.route('/get_course', methods=['POST'])
def get_course():
    temperature = 0.9
    max_new_tokens = 256
    top_p = 0.95
    repetition_penalty = 1.0

    content = request.json
    user_degree = content.get('degree')
    user_stream = content.get('stream')

    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )

    prompt = f""" prompt:
    You need to act as a recommendation engine for course recommendation for a student. Below are the current details.
    Degree: {user_degree}
    Stream: {user_stream}
    Based on the current details, recommend courses for a higher degree.
    Note: Output should be a list in the below format:
    [course1, course2, course3,...]
    Return only the answer, not the prompt or unnecessary text, and don't add any special characters or punctuation marks.
    """
    formatted_prompt = format_prompt(prompt)

    response = client.text_generation(formatted_prompt, **generate_kwargs, stream=False, details=False, return_full_text=False)
    return jsonify({"ans": response})
@app.route('/get_mentor', methods=['POST'])
def get_mentor():
    temperature = 0.9
    max_new_tokens = 256
    top_p = 0.95
    repetition_penalty = 1.0

    content = request.json
    user_degree = content.get('degree')
    user_stream = content.get('stream')
    courses = content.get('courses')

    # Query verified mentors from the database
    session = Session()
    verified_mentors = session.query(Mentor).filter_by(verified=True).all()
    mentor_list = [{"id": mentor.id, "mentor_name": mentor.mentor_name, "skills": mentor.skills,
                    "qualification": mentor.qualification, "experience": mentor.experience,
                    "verified": mentor.verified} for mentor in verified_mentors]
    session.close()
    mentors_data = mentor_list

    # Clamp sampling parameters to valid ranges
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2
    top_p = float(top_p)

    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )

    prompt = f""" prompt:
    You need to act as a recommendation engine for mentor recommendation for a student based on the below details; the list of mentors with their experience is attached.
    Degree: {user_degree}
    Stream: {user_stream}
    Courses opted: {courses}
    Mentor list: {mentors_data}
    Based on the above details, recommend the mentors that relate to them.
    Note: Output should be a list in the below format:
    [mentor1, mentor2, mentor3,...]
    """
    formatted_prompt = format_prompt(prompt)

    response = client.text_generation(formatted_prompt, **generate_kwargs, stream=False, details=False, return_full_text=False)
    return jsonify({"ans": response})
@app.route('/get_streams', methods=['GET'])
def get_streams():
    temperature = 0.9
    max_new_tokens = 256
    top_p = 0.95
    repetition_penalty = 1.0

    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )

    prompt = f""" prompt:
    You need to act as a recommendation engine.
    List 40+ streams/branches, such as computer science, chemical engineering, aerospace, etc.
    Note: Output should be a list in the below format:
    [branch1, branch2, branch3,...]
    Return only the answer, not the prompt or unnecessary text, and don't add any special characters or punctuation marks.
    """
    formatted_prompt = format_prompt(prompt)

    response = client.text_generation(formatted_prompt, **generate_kwargs, stream=False, details=False, return_full_text=False)
    return jsonify({"ans": response})
@app.route('/get_education_profiles', methods=['GET'])
def get_education_profiles():
    temperature = 0.9
    max_new_tokens = 256
    top_p = 0.95
    repetition_penalty = 1.0

    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )

    sectors = ["engineering", "medical", "arts", "commerce", "science", "management"]  # Example sectors

    prompt = f"""prompt:
    You need to act like a recommendation engine.
    List all education-related profiles in sectors like {', '.join(sectors)}.
    Note: Output should be a list in the below format:
    [profile1, profile2, profile3,...]
    Return only the answer, not the prompt or unnecessary stuff, and don't add any special characters or punctuation marks.
    """
    formatted_prompt = format_prompt(prompt)

    education_profiles = client.text_generation(formatted_prompt, **generate_kwargs, stream=False, details=False, return_full_text=False)
    return jsonify({"ans": education_profiles})
if __name__ == '__main__':
    app.run(debug=True)
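# Example request (assumes the app is served locally on Flask's default port 5000):
#   curl -X POST http://localhost:5000/ai_mentor \
#        -H "Content-Type: application/json" \
#        -d '{"message": "How should I prepare for a career in data science?"}'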