import gradio as gr
import os
import aiohttp
import asyncio
import json
import logging
import re
import shutil
import threading
from concurrent.futures import ThreadPoolExecutor
from http.server import HTTPServer, BaseHTTPRequestHandler
from pathlib import Path
from typing import Dict, List, Optional, Tuple

import markdown2
import numpy as np
import plotly.express as px
import websockets
from git import Repo
from hdbscan import HDBSCAN  # the package is "hdbscan"; "hdb_scan" does not exist
from transformers import pipeline
from websockets.exceptions import ConnectionClosed

# Assumed local module providing the embeddable editor component (the original
# imported it twice and also pulled in a generate_code_snippet that was never used).
from code_editor import code_editor
# ========== Configuration ==========
WORKSPACE = Path("/tmp/issue_workspace")
WORKSPACE.mkdir(exist_ok=True)
GITHUB_API = "https://api.github.com/repos"
HF_INFERENCE_API = "https://api-inference.huggingface.co/models"
WEBHOOK_PORT = 8000
WS_PORT = 8001

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
executor = ThreadPoolExecutor(max_workers=4)

HF_MODELS = {
    "Mistral-8x7B": "mistralai/Mixtral-8x7B-Instruct-v0.1",
    "Llama-3-8B": "meta-llama/Meta-Llama-3-8B",
    "CodeLlama-34B": "codellama/CodeLlama-34b-Instruct-hf",
    "StarCoder2": "bigcode/starcoder2-15b"
}
DEFAULT_MODEL = "mistralai/Mixtral-8x7B-Instruct-v0.1"
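
# crawl_issues accepts either a bare "owner/repo" slug or a full GitHub URL;
# this pattern (an added helper, not in the original) extracts the slug that
# the REST API paths need.
GITHUB_REPO_RE = re.compile(
    r"(?:github\.com[:/])?(?P<owner>[\w.-]+)/(?P<repo>[\w.-]+?)(?:\.git)?/?$"
)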
# ========== Modern Theme ==========
theme = gr.themes.Soft(
    primary_hue="violet",
    secondary_hue="emerald",
    radius_size="xl",
    font=[gr.themes.GoogleFont("Inter"), "ui-sans-serif", "system-ui"]
).set(
    button_primary_background_fill="linear-gradient(90deg, #8B5CF6 0%, #EC4899 100%)",
    button_primary_text_color="white",
    block_label_text_size="lg",
    block_label_text_weight="600",
    block_title_text_size="xl",
    block_title_text_weight="800",
    panel_background_fill="white",
    block_shadow="*shadow_drop_lg",
    # The original also set button_primary_border_radius and panel_border_radius,
    # which are not recognized theme variables and make .set() raise; rounding
    # already comes from radius_size="xl" above.
)
# ========== Enhanced Webhook Handler ==========
class WebhookHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        content_length = int(self.headers.get('Content-Length', 0))
        payload = json.loads(self.rfile.read(content_length).decode())
        event = self.headers.get('X-GitHub-Event')
        if event == 'issues':
            action = payload.get('action')
            if action in ['opened', 'reopened', 'closed', 'assigned']:
                # This handler runs in the HTTP server's thread, where no event
                # loop is running (the original asyncio.get_event_loop() call
                # fails here); schedule the coroutine on the manager's loop.
                if manager.ws_loop is not None:
                    asyncio.run_coroutine_threadsafe(
                        manager.handle_webhook_event(event, action, payload),
                        manager.ws_loop
                    )
        self.send_response(200)
        self.end_headers()
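
# For these events to arrive, the repository's GitHub settings must include a
# webhook that POSTs "Issues" events to this server (port 8000 by default).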
# ========== AI-Powered Issue Manager ==========
class IssueManager:
    def __init__(self):
        self.issues: Dict[int, dict] = {}
        self.repo_url: Optional[str] = None
        self.repo: Optional[Repo] = None
        self.current_issue: Optional[int] = None
        self.github_token: Optional[str] = None
        self.hf_token: Optional[str] = None
        self.collaborators: Dict[str, dict] = {}
        self.points: int = 0
        self.severity_rules: Dict[str, List[str]] = {
            "Critical": ["critical", "urgent", "security", "crash"],
            "High": ["high", "important", "error", "regression"],
            "Medium": ["medium", "bug", "performance"],
            "Low": ["low", "documentation", "enhancement"]
        }
        self.issue_clusters: Dict[int, List[int]] = {}  # cluster id -> issue indices
        self.suggestion_cache: Dict[str, str] = {}  # backs cached_suggestion
        self.ws_loop: Optional[asyncio.AbstractEventLoop] = None  # set by the WS server thread
        self._init_local_models()
        # Server-side connections, so WebSocketServerProtocol (the original
        # annotated the client-side class by mistake).
        self.ws_clients: List[websockets.WebSocketServerProtocol] = []
        self.code_editors: Dict[int, "OTCodeEditor"] = {}  # one OT editor per issue
    def _init_local_models(self):
        # Loading these checkpoints downloads several GB of weights and
        # realistically needs a GPU; device_map/torch_dtype="auto" let
        # accelerate place them wherever it can.
        self.code_model = pipeline(
            "text-generation",
            model="codellama/CodeLlama-7b-Instruct-hf",
            device_map="auto",
            torch_dtype="auto"
        )
        self.summarizer = pipeline(
            "summarization",
            model="philschmid/bart-large-cnn-samsum",
            device_map="auto"
        )
    async def cached_suggestion(self, issue_hash: str, model: str) -> str:
        # functools.lru_cache does not work on coroutines, so the original's
        # "cached" wrapper cached nothing; a plain dict does the job.
        key = f"{issue_hash}:{model}"
        if key not in self.suggestion_cache:
            self.suggestion_cache[key] = await self.suggest_resolution(issue_hash, model)
        return self.suggestion_cache[key]

    async def handle_webhook_event(self, event: str, action: str, payload: dict):
        logger.info(f"Processing {event} {action} event")
        if action == 'closed':
            self.issues.pop(payload['issue']['number'], None)
        else:
            # Any other tracked action triggers a full re-crawl of the repository.
            await self.crawl_issues(self.repo_url, self.github_token, self.hf_token)
    async def crawl_issues(self, repo_url: str, github_token: str, hf_token: str) -> Tuple[bool, str]:
        try:
            self.repo_url = repo_url
            self.github_token = github_token
            self.hf_token = hf_token
            # The GitHub API wants "owner/repo", not the full clone URL the
            # original interpolated into the path.
            match = GITHUB_REPO_RE.search(repo_url)
            if not match:
                return False, f"Could not parse owner/repo from {repo_url!r}"
            repo_slug = f"{match['owner']}/{match['repo']}"
            clone_dir = WORKSPACE / "repo"
            if clone_dir.exists():
                shutil.rmtree(clone_dir)  # Repo.clone_from fails on a non-empty target
            self.repo = Repo.clone_from(repo_url, clone_dir)
            headers = {"Authorization": f"token {github_token}"}
            async with aiohttp.ClientSession(headers=headers) as session:
                async with session.get(f"{GITHUB_API}/{repo_slug}/issues") as response:
                    issues = await response.json()
                    for issue in issues:
                        self.issues[issue['number']] = issue
            await self._cluster_similar_issues()
            return True, f"Found {len(self.issues)} issues (clustered into {len(self.issue_clusters)} groups)"
        except Exception as e:
            logger.error(f"Crawl error: {e}")
            return False, str(e)
    async def _cluster_similar_issues(self):
        embeddings = np.array(await self._generate_embeddings())
        # hdbscan has no native cosine metric; L2-normalise the vectors and
        # cluster with euclidean distance, which orders pairs identically.
        embeddings = embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)
        clusterer = HDBSCAN(min_cluster_size=2, metric='euclidean')
        clusters = clusterer.fit_predict(embeddings)
        self.issue_clusters = {}
        for i, cluster_id in enumerate(clusters):
            self.issue_clusters.setdefault(int(cluster_id), []).append(i)
    async def _generate_embeddings(self):
        texts = [f"{i['title']} {i['body']}" for i in self.issues.values()]
        async with aiohttp.ClientSession() as session:
            async with session.post(
                f"{HF_INFERENCE_API}/sentence-transformers/all-mpnet-base-v2",
                headers={"Authorization": f"Bearer {self.hf_token}"},
                json={"inputs": texts}
            ) as response:
                # The Inference API returns one embedding vector per input text.
                return await response.json()
    async def generate_code_patch(self, issue_number: int) -> dict:
        issue = self.issues[issue_number]
        context = await self._get_code_context(issue_number)
        prompt = f"""<issue>
{issue['title']}
{issue['body']}
</issue>
<code_context>
{context}
</code_context>
Generate a JSON patch file with specific changes needed to resolve this issue."""
        # The transformers pipeline is blocking; run it on the shared executor
        # so it doesn't stall the event loop.
        response = await asyncio.get_running_loop().run_in_executor(
            executor,
            lambda: self.code_model(
                prompt,
                max_length=1024,
                temperature=0.2,
                return_full_text=False
            )
        )
        try:
            return json.loads(response[0]['generated_text'])
        except json.JSONDecodeError:
            return {"error": "Failed to parse AI-generated patch"}
    async def _get_code_context(self, issue_number: int) -> str:
        # crawl_issues clones into WORKSPACE / "repo", so read from there (the
        # original looked for a per-issue checkout that is never created).
        repo_path = WORKSPACE / "repo"
        code_files = list(repo_path.glob('**/*.py')) + list(repo_path.glob('**/*.js'))
        return "\n".join(f.read_text(errors="ignore")[:1000] for f in code_files[:5])
    async def suggest_resolution(self, issue_hash: str, model: str) -> str:
        issue = self.issues[int(issue_hash)]
        prompt = f"""
## Issue: {issue['title']}
{issue['body']}
Suggest a solution to this issue.
"""
        async with aiohttp.ClientSession() as session:
            async with session.post(
                f"{HF_INFERENCE_API}/{model}",
                headers={"Authorization": f"Bearer {self.hf_token}"},
                json={"inputs": prompt}
            ) as response:
                result = await response.json()
        # Text-generation endpoints answer with [{"generated_text": ...}].
        if isinstance(result, list) and result and "generated_text" in result[0]:
            return result[0]["generated_text"]
        return str(result)
    async def broadcast_collaboration_status(self):
        # Push collaborator presence to every connected client once a second.
        # The payload is wrapped in a typed envelope so the front-end can tell
        # it apart from code updates (the original sent a bare list, which the
        # JavaScript handler could never match).
        while True:
            try:
                await asyncio.sleep(1)
                if not self.ws_clients:
                    continue
                message = json.dumps({
                    "type": "collaboration_status",
                    "users": [{"name": name, "status": status}
                              for name, status in self.collaborators.items()]
                })
                await asyncio.gather(*[client.send(message) for client in self.ws_clients])
            except ConnectionClosed:
                # A client dropped mid-send; it is pruned in handle_ws_connection.
                pass

    async def handle_code_editor_update(self, issue_num: int, delta: str):
        if issue_num not in self.code_editors:
            return
        self.code_editors[issue_num].apply_delta(json.loads(delta))
        await asyncio.gather(
            *[client.send(json.dumps({"type": "code_update",
                                      "issue_num": issue_num,
                                      "delta": delta}))
              for client in self.ws_clients]
        )
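
# The operational-transform editor class is expected to come from the local
# code_editor module; this guarded import with a minimal fallback is a sketch
# so the file still loads if that module does not export it.
try:
    from code_editor import OTCodeEditor
except ImportError:
    class OTCodeEditor:
        """Placeholder that records deltas without doing real OT."""
        def __init__(self):
            self.deltas: List[dict] = []

        def apply_delta(self, delta: dict):
            self.deltas.append(delta)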
# ========== Enhanced UI Components ==========
def create_ui():
    with gr.Blocks(theme=theme, title="🤖 AI Issue Resolver Pro",
                   css=".gradio-container {max-width: 1200px !important}") as app:
        gr.Markdown("""
        # 🚀 AI Issue Resolver Pro
        *Next-generation issue resolution powered by AI collaboration*
        """)
with gr.Row(variant="panel"): | |
with gr.Column(scale=2): | |
repo_url = gr.Textbox(label="GitHub Repo", placeholder="https://github.com/org/repo", info="Connect your repository") | |
github_token = gr.Textbox(label="GitHub Token", type="password") | |
hf_token = gr.Textbox(label="HF Token", type="password") | |
with gr.Column(scale=1): | |
model_select = gr.Dropdown(choices=list(HF_MODELS.keys()), value="Mistral-8x7B", | |
label="AI Model", info="Choose your resolution strategy") | |
language_select = gr.Dropdown(choices=["python", "javascript", "java", "c", "cpp", "html", "css", "bash", "ruby", "go", "php", "rust", "typescript"], | |
value="python", label="Select Language", info="Choose the programming language for the code editor") | |
crawl_btn = gr.Button("π Scan Repository", variant="primary") | |
        with gr.Tabs():
            with gr.Tab("📋 Issue Board", id="board"):
                with gr.Row():
                    issue_list = gr.Dataframe(
                        headers=["ID", "Title", "Severity", "Cluster"],
                        datatype=["number", "str", "str", "number"],
                        interactive=True,
                        height=600
                    )
                    with gr.Column(scale=1):
                        stats_plot = gr.Plot()
                        collab_status = gr.HTML("<h3>👥 Active Collaborators</h3><div id='collab-list'></div>")
with gr.Tab("π» Resolution Studio", id="studio"): | |
with gr.Row(): | |
with gr.Column(scale=1): | |
issue_num = gr.Number(label="Issue #", precision=0) | |
issue_viz = gr.HTML() | |
ai_tools = gr.Accordion("π οΈ AI Tools") | |
with ai_tools: | |
suggest_btn = gr.Button("π§ Suggest Resolution") | |
patch_btn = gr.Button("π Generate Patch") | |
test_btn = gr.Button("π§ͺ Create Tests") | |
impact_btn = gr.Button("π Impact Analysis") | |
with gr.Column(scale=2): | |
with gr.Tabs(): | |
with gr.Tab("Code Editor"): | |
code_edit = gr.HTML(elem_id="code-editor-container") | |
with gr.Tab("AI Chat"): | |
chat = gr.ChatInterface( | |
self._ai_chat, | |
additional_inputs=[issue_num] | |
) | |
with gr.Tab("π Analytics", id="analytics"): | |
with gr.Row(): | |
gr.Markdown("### π Resolution Timeline") | |
timeline = gr.Timeline() | |
with gr.Row(): | |
gr.Markdown("### π Achievement System") | |
badges = gr.HTML("<div class='badges'></div>") | |
        # Enhanced Event Handlers
        async def generate_patch(issue_number):
            # Return the raw dict; the gr.JSON output renders it directly.
            return await manager.generate_code_patch(int(issue_number))

        def update_code_editor(files):
            return code_editor(value=files, language=language_select.value)
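
        # Hypothetical helper (the original referenced it without defining it):
        # renders an issue as HTML for the preview pane.
        def generate_issue_preview(issue_number) -> str:
            issue = manager.issues.get(int(issue_number))
            if not issue:
                return "<p>Issue not found. Scan the repository first.</p>"
            body_html = markdown2.markdown(issue.get("body") or "")
            return f"<h3>#{issue['number']}: {issue['title']}</h3>{body_html}"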
        def on_issue_select(evt: gr.SelectData):
            # evt.index is (row, col); map the row back to an issue number via
            # the insertion order used when the board was populated. (The
            # original wrote to an issue_body component that never existed.)
            issue_ids = list(manager.issues.keys())
            num = issue_ids[evt.index[0]] if evt.index[0] < len(issue_ids) else None
            return num, (generate_issue_preview(num) if num else "")

        issue_list.select(fn=on_issue_select, outputs=[issue_num, issue_viz])
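
        # Hypothetical initializer (app.load referenced it without defining
        # it): seeds the collaborator panel before live WebSocket updates.
        def init_collaboration():
            return "<h3>👥 Active Collaborators</h3><div id='collab-list'>No collaborators yet</div>"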
        # Real-time collaboration bootstrap: inject the WebSocket client.
        # (Moved above app.load so the name exists when it is called; the
        # original defined it further down and crashed with a NameError.)
        def web_socket_js():
            return """
            <script>
                const collabWs = new WebSocket('ws://localhost:8001');
                collabWs.onmessage = function(event) {
                    const data = JSON.parse(event.data);
                    if (data.type === 'code_update') {
                        const codeEditor = document.getElementById(`code-editor-${data.issue_num}`);
                        if (codeEditor) {
                            codeEditor.applyDelta(data.delta);
                        }
                    } else if (data.type === 'collaboration_status') {
                        document.getElementById('collab-list').innerHTML =
                            data.users.map(u => `<div class="collab-item">${u.name}: ${u.status}</div>`).join('');
                    }
                };
            </script>
            """

        app.load(
            fn=init_collaboration,
            inputs=[],
            outputs=collab_status,
            js=web_socket_js()  # Gradio 4 renamed the old _js keyword to js
        )
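
        # Adapter so the scan button can fill both outputs: crawl_issues
        # returns (ok, message), but the board wants rows and the stats panel
        # wants a chart. Severity is inferred from the manager's keyword rules;
        # this wrapper is an added sketch, not part of the original.
        async def handle_crawl(repo, token, hf_tok):
            ok, msg = await manager.crawl_issues(repo, token, hf_tok)
            logger.info(msg)
            if not ok:
                return [], None
            rows, severities = [], []
            for idx, (num, issue) in enumerate(manager.issues.items()):
                text = f"{issue.get('title', '')} {issue.get('body') or ''}".lower()
                severity = next((level for level, words in manager.severity_rules.items()
                                 if any(w in text for w in words)), "Low")
                cluster = next((cid for cid, members in manager.issue_clusters.items()
                                if idx in members), -1)
                severities.append(severity)
                rows.append([num, issue["title"], severity, cluster])
            fig = px.histogram(x=severities, title="Issues by Severity")
            return rows, fig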
        crawl_btn.click(
            fn=handle_crawl,
            inputs=[repo_url, github_token, hf_token],
            outputs=[issue_list, stats_plot]
        )
        async def handle_suggest(issue, model):
            # The dropdown holds display names; map them to Hub model IDs
            # (the original passed the display name straight into the API URL).
            return await manager.cached_suggestion(str(int(issue)), HF_MODELS[model])

        suggest_btn.click(
            fn=handle_suggest,
            inputs=[issue_num, model_select],
            outputs=suggestion_out
        )
        patch_btn.click(
            fn=generate_patch,
            inputs=[issue_num],
            outputs=patch_out
        )
        # Add more event handlers for the other AI tools (test_btn, impact_btn)
        # ...
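
        # Hypothetical helper (the original referenced it without defining it):
        # registers an OT editor for the issue and emits the HTML shell that
        # the WebSocket script looks up by id.
        def create_code_editor(issue_number, language):
            if issue_number is None:
                return ""
            num = int(issue_number)
            manager.code_editors.setdefault(num, OTCodeEditor())
            return f'<div id="code-editor-{num}" data-language="{language}"></div>'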
        issue_num.change(
            fn=create_code_editor,
            inputs=[issue_num, language_select],
            outputs=code_edit
        )
    # Start enhanced webhook server (daemon thread so launch() can proceed)
    webhook_server = HTTPServer(("", WEBHOOK_PORT), WebhookHandler)
    threading.Thread(target=webhook_server.serve_forever, daemon=True).start()

    # Run the WebSocket server and the broadcast loop on one event loop in a
    # daemon thread; the original called asyncio.run() on the main thread,
    # which blocked forever so create_ui() never returned and the app never
    # launched (and the broadcast loop ran on a second, disconnected loop).
    async def handle_ws_connection(websocket: websockets.WebSocketServerProtocol):
        # Note: recent websockets versions call the handler with just the
        # connection; older ones also passed a path argument.
        manager.ws_clients.append(websocket)
        try:
            async for message in websocket:
                data = json.loads(message)
                if data.get("type") == "code_update":
                    await manager.handle_code_editor_update(data["issue_num"], data["delta"])
        finally:
            manager.ws_clients.remove(websocket)

    async def start_ws_server():
        manager.ws_loop = asyncio.get_running_loop()  # used by the webhook thread
        async with websockets.serve(handle_ws_connection, "localhost", WS_PORT):
            await manager.broadcast_collaboration_status()  # runs until shutdown

    threading.Thread(target=lambda: asyncio.run(start_ws_server()), daemon=True).start()
    return app
# ========== Execution ==========
if __name__ == "__main__":
    manager = IssueManager()
    app = create_ui()
    app.launch(
        share=True,
        # Skip auth when APP_PASSWORD is unset; auth=("admin", None) would fail.
        auth=("admin", os.environ["APP_PASSWORD"]) if os.getenv("APP_PASSWORD") else None
        # favicon_path expects a local file path, so the original's remote URL
        # (https://huggingface.co/front/assets/huggingface_logo-noborder.svg)
        # was dropped.
    )