Not-Grim-Refer committed on
Commit
88bd7aa
·
1 Parent(s): 76069c2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -18
app.py CHANGED
@@ -8,46 +8,31 @@ debug = True
8
  logging_level = logging.DEBUG if debug else logging.INFO
9
  logging.basicConfig(level=logging_level)
10
 
11
- os.system("pip install -r requirements.txt")
12
- os.system("pip freeze")
13
- os.system("pip install gradio==2.3.0")
14
- os.system("pip install requests==2.26.0")
15
- os.system("pip install transformers==4.11.3")
16
-
17
  # Initialize the CodeBERT model and tokenizer
18
  tokenizer = AutoTokenizer.from_pretrained("microsoft/CodeBERT-base")
19
  model = AutoModelForSeq2SeqLM.from_pretrained("microsoft/CodeBERT-base")
20
 
21
  def fetch_repo_contents(repo_url):
22
- # Extract username and repo name from URL
23
  username, repo_name = repo_url.split("github.com/")[-1].split("/")
24
-
25
- # Fetch repo contents using GitHub API
26
  api_url = f"https://api.github.com/repos/{username}/{repo_name}/contents"
27
  response = requests.get(api_url)
28
  response.raise_for_status()
29
  return response.json()
30
 
31
  def generate_chatbot_response(repo_url, question):
32
- # Fetch repository contents
33
  repo_contents = fetch_repo_contents(repo_url)
34
-
35
- # Generate a prompt based on the user's question and repository contents
36
  prompt = f"Answer the question about the repository {repo_url}: {question}\n\n"
37
  for item in repo_contents:
38
  prompt += f"{item['name']}:\n{item['download_url']}\n\n"
39
-
40
- # Tokenize the prompt and generate a response using the CodeBERT model
41
  inputs = tokenizer.encode(prompt, return_tensors="pt", max_length=1024, truncation=True)
42
  outputs = model.generate(inputs, max_length=150, num_return_sequences=1)
43
  response = tokenizer.decode(outputs[0], skip_special_tokens=True)
44
-
45
  return response
46
 
47
  # Gradio UI
48
- repo_url_input = gr.inputs.Textbox(lines=1, label="GitHub Repository URL")
49
- question_input = gr.inputs.Textbox(lines=2, label="Question")
50
- output_text = gr.outputs.Textbox(label="Answer")
51
 
52
  gr.Interface(
53
  generate_chatbot_response,
 
8
  logging_level = logging.DEBUG if debug else logging.INFO
9
  logging.basicConfig(level=logging_level)
10
 
 
 
 
 
 
 
11
  # Initialize the CodeBERT model and tokenizer
12
  tokenizer = AutoTokenizer.from_pretrained("microsoft/CodeBERT-base")
13
  model = AutoModelForSeq2SeqLM.from_pretrained("microsoft/CodeBERT-base")
14
 
15
def fetch_repo_contents(repo_url):
    """Fetch the top-level contents listing of a GitHub repository.

    Parameters
    ----------
    repo_url : str
        A GitHub repository URL, e.g. "https://github.com/user/repo".
        Trailing slashes and extra path segments (e.g. "/tree/main")
        are tolerated.

    Returns
    -------
    list[dict]
        The JSON payload from the GitHub "contents" API (one dict per
        top-level file/directory).

    Raises
    ------
    ValueError
        If an owner and repository name cannot be parsed from the URL.
    requests.HTTPError
        If the GitHub API responds with an error status.
    """
    # Take only the first two path segments so a trailing slash or extra
    # components don't break the two-value unpack the old code relied on.
    path = repo_url.split("github.com/")[-1].strip("/")
    segments = path.split("/")
    if len(segments) < 2 or not segments[0] or not segments[1]:
        raise ValueError(f"Could not parse owner/repo from URL: {repo_url}")
    username, repo_name = segments[0], segments[1]

    api_url = f"https://api.github.com/repos/{username}/{repo_name}/contents"
    # A timeout prevents the UI from hanging indefinitely on network issues.
    response = requests.get(api_url, timeout=10)
    response.raise_for_status()
    return response.json()
21
 
22
def generate_chatbot_response(repo_url, question):
    """Answer *question* about the GitHub repository at *repo_url*.

    Builds a text prompt from the repository's top-level file listing
    (names plus download URLs) and generates a single short answer with
    the module-level CodeBERT tokenizer/model pair.
    """
    contents = fetch_repo_contents(repo_url)

    # Assemble the prompt: question header followed by one
    # "name:\nurl\n\n" entry per repository item.
    header = f"Answer the question about the repository {repo_url}: {question}\n\n"
    listing = "".join(
        f"{entry['name']}:\n{entry['download_url']}\n\n" for entry in contents
    )
    prompt = header + listing

    # Encode (truncated to the model's window), generate one candidate,
    # and decode it back to plain text.
    token_ids = tokenizer.encode(prompt, return_tensors="pt", max_length=1024, truncation=True)
    generated = model.generate(token_ids, max_length=150, num_return_sequences=1)
    return tokenizer.decode(generated[0], skip_special_tokens=True)
31
 
32
# Gradio UI components.
# NOTE(review): this commit changed Textbox -> Text, but gr.inputs.Text /
# gr.outputs.Text do not exist in the Gradio 2.x API — restore Textbox
# (with the original lines= settings) so the app starts.
repo_url_input = gr.inputs.Textbox(lines=1, label="GitHub Repository URL")
question_input = gr.inputs.Textbox(lines=2, label="Question")
output_text = gr.outputs.Textbox(label="Answer")
36
 
37
  gr.Interface(
38
  generate_chatbot_response,