import ast
from collections import defaultdict
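# This module splits Python source code into categorized "parts": imports, functions, classes,
# control-flow statements, variable roles (assigned/input/returned variables), and blank or
# comment "spacer" lines. Each part records its source line(s), location, nesting level,
# parent path, a node_id, and a 6D vector used for structural/role similarity comparisons.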
def get_category(node, parent=None):
    """Determine the category of an AST node or variable context, including variable roles."""
    if isinstance(node, (ast.Import, ast.ImportFrom)):
        return 'import'
    elif isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
        return 'function'
    elif isinstance(node, ast.ClassDef):
        return 'class'
    elif isinstance(node, ast.If):
        return 'if'
    elif isinstance(node, ast.While):
        return 'while'
    elif isinstance(node, ast.For):
        return 'for'
    elif isinstance(node, ast.Try):
        return 'try'
    elif isinstance(node, ast.Return):
        return 'return'
    elif isinstance(node, ast.Expr):
        return 'expression'
    elif isinstance(node, ast.ExceptHandler):
        return 'except'
    elif isinstance(node, (ast.Assign, ast.AnnAssign, ast.AugAssign)):
        if parent and isinstance(parent, (ast.FunctionDef, ast.AsyncFunctionDef, ast.If, ast.Try, ast.While, ast.For)):
            return 'assigned_variable'
    elif isinstance(node, ast.arg):
        if parent and isinstance(parent, (ast.FunctionDef, ast.AsyncFunctionDef)):
            return 'input_variable'
    elif isinstance(node, ast.Name):
        if parent and isinstance(parent, ast.Return):
            return 'returned_variable'
    return 'other'
def get_value(node):
    """Extract the value of an AST node (e.g., for assignment)."""
    if isinstance(node, ast.Constant):
        return str(node.value)
    elif isinstance(node, ast.Name):
        return node.id
    elif isinstance(node, ast.BinOp):
        return '<expression>'
    elif isinstance(node, ast.Call):
        return '<function_call>'
    return '<complex>'
def is_blank_or_comment(line):
    """Check if a line is blank or a comment."""
    stripped = line.strip()
    return not stripped or stripped.startswith('#')
def create_vector(category, level, location, total_lines, parent_path):
    """Create a 6D vector optimized for role similarity, integrating variable roles into category_id."""
    category_map = {
        'import': 1, 'function': 2, 'async_function': 3, 'class': 4,
        'if': 5, 'while': 6, 'for': 7, 'try': 8, 'expression': 9, 'spacer': 10,
        'other': 11, 'elif': 12, 'else': 13, 'except': 14, 'finally': 15, 'return': 16,
        'assigned_variable': 17, 'input_variable': 18, 'returned_variable': 19
    }
    category_id = category_map.get(category, 0)  # Default to 0 for unknown categories
    start_line, end_line = location
    span = (end_line - start_line + 1) / total_lines
    center_pos = ((start_line + end_line) / 2) / total_lines
    parent_depth = len(parent_path)
    parent_weight = sum(category_map.get(parent.split('[')[0].lower(), 0) * (1 / (i + 1))
                        for i, parent in enumerate(parent_path)) / max(1, len(category_map))
    return [category_id, level, center_pos, span, parent_depth, parent_weight]
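# Example: a top-level function spanning lines 3-5 of a 5-line file yields
# [2, 0, 0.8, 0.6, 0, 0.0]: category_id 2 ('function'), level 0,
# center ((3 + 5) / 2) / 5 = 0.8, span 3 / 5 = 0.6, parent_depth 0, parent_weight 0.0.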
def collect_variable_usage(tree):
    """Collect definitions and uses of variables, respecting scope."""
    var_defs = defaultdict(list)  # {var_name: [(node_id, scope)]}
    var_uses = defaultdict(list)  # {var_name: [(node_id, scope)]}
    def traverse(node, current_scope):
        node_id = getattr(node, 'node_id', None)
        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
            # Entering a function: its parameters are definitions in the function's own scope.
            current_scope = node.name
            for arg in node.args.args:
                var_defs[arg.arg].append((f"InputVariable[{arg.arg}]", current_scope))
        elif isinstance(node, (ast.Assign, ast.AnnAssign)):
            for target in (node.targets if isinstance(node, ast.Assign) else [node.target]):
                if isinstance(target, ast.Name):
                    var_defs[target.id].append((node_id, current_scope))
            # Names read on the right-hand side count as uses; an AnnAssign may have no value.
            if getattr(node, 'value', None) is not None:
                for child in ast.walk(node.value):
                    if isinstance(child, ast.Name):
                        var_uses[child.id].append((node_id, current_scope))
            return  # right-hand side already walked above; avoid double counting
        elif isinstance(node, ast.Name) and isinstance(node.ctx, ast.Load):
            var_uses[node.id].append((node_id, current_scope))
        for child in ast.iter_child_nodes(node):
            traverse(child, current_scope)
    for node in tree.body:
        traverse(node, 'global')
    return var_defs, var_uses
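# Example: for "def add(a, b):\n    return a + b", the collector records definitions of
# 'a' and 'b' in scope 'add' (as InputVariable entries) and one Load use of each in that scope.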
CATEGORIES = ['import', 'function', 'class', 'if', 'while', 'for', 'try', 'return',
              'expression', 'other', 'spacer', 'elif', 'else', 'except', 'finally',
              'assigned_variable', 'input_variable', 'returned_variable']
def parse_node(node, lines, prev_end, level=0, total_lines=None, parent_path=None, counters=None, processed_lines=None):
    if total_lines is None:
        total_lines = len(lines)
    if parent_path is None:
        parent_path = []
    if counters is None:
        counters = {cat: 0 for cat in CATEGORIES}
    if processed_lines is None:
        processed_lines = set()
    parts = []
    start_line = getattr(node, 'lineno', prev_end + 1)
    end_line = getattr(node, 'end_lineno', start_line)
    if any(line in processed_lines for line in range(start_line, end_line + 1)):
        return parts, []
    category = get_category(node, parent_path[-1] if parent_path else None) or 'other'
    if category not in counters:
        category = 'other'
    counters[category] += 1
    node_id = f"{category.capitalize()}[{counters[category]}]"
    setattr(node, 'node_id', node_id)  # Attach node_id to the AST node
    if start_line > prev_end + 1:
        for i, line in enumerate(lines[prev_end:start_line - 1], prev_end + 1):
            if i not in processed_lines and is_blank_or_comment(line):
                counters['spacer'] += 1
                spacer_node_id = f"Spacer[{counters['spacer']}]"
                parts.append({
                    'category': 'spacer',
                    'source': line,
                    'location': (i, i),
                    'level': level,
                    'vector': create_vector('spacer', level, (i, i), total_lines, parent_path),
                    'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                    'node_id': spacer_node_id
                })
                processed_lines.add(i)
    current_path = parent_path + [node_id]
    if start_line not in processed_lines and not is_blank_or_comment(lines[start_line - 1]):
        part = {
            'category': category,
            'source': lines[start_line - 1],
            'location': (start_line, start_line),
            'level': level,
            'vector': create_vector(category, level, (start_line, start_line), total_lines, current_path),
            'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
            'node_id': node_id
        }
        if category == 'assigned_variable':
            part['value'] = get_value(node.value) if hasattr(node, 'value') else '<unknown>'
        parts.append(part)
        processed_lines.add(start_line)
    # Emit one synthetic part per function parameter. These share the `def` line, which was
    # already marked processed above, so they must not be gated on processed_lines here.
    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)) and node.args.args:
        for arg in node.args.args:
            var_start = start_line
            arg_category = 'input_variable'
            counters[arg_category] += 1
            var_node_id = f"InputVariable[{counters[arg_category]}]"
            parts.append({
                'category': arg_category,
                'source': f" {arg.arg},",
                'location': (var_start, var_start),
                'level': level + 1,
                'vector': create_vector(arg_category, level + 1, (var_start, var_start), total_lines, current_path),
                'parent_path': f"{current_path[0]} -> {var_node_id}",
                'node_id': var_node_id
            })
    nested_prev_end = start_line
    for attr in ('body', 'orelse', 'handlers', 'finalbody'):
        if hasattr(node, attr) and getattr(node, attr):
            for child in getattr(node, attr):
                child_start = getattr(child, 'lineno', nested_prev_end + 1)
                child_end = getattr(child, 'end_lineno', child_start)
                if not any(line in processed_lines for line in range(child_start, child_end + 1)):
                    if attr == 'orelse' and isinstance(node, ast.If) and child_start != start_line:
                        sub_category = 'elif' if 'elif' in lines[child_start - 1] else 'else'
                        if child_start not in processed_lines and not is_blank_or_comment(lines[child_start - 1]):
                            counters[sub_category] += 1
                            sub_node_id = f"{sub_category.capitalize()}[{counters[sub_category]}]"
                            parts.append({
                                'category': sub_category,
                                'source': lines[child_start - 1],
                                'location': (child_start, child_start),
                                'level': level,
                                'vector': create_vector(sub_category, level, (child_start, child_start), total_lines, current_path),
                                'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                                'node_id': sub_node_id
                            })
                            processed_lines.add(child_start)
                        child_parts, child_seq = parse_node(child, lines, child_start, level + 1, total_lines, current_path, counters, processed_lines)
                        parts.extend(child_parts)
                        nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else child_start)
                    elif attr == 'handlers' and isinstance(child, ast.ExceptHandler):
                        if child_start not in processed_lines and not is_blank_or_comment(lines[child_start - 1]):
                            counters['except'] += 1
                            sub_node_id = f"Except[{counters['except']}]"
                            parts.append({
                                'category': 'except',
                                'source': lines[child_start - 1],
                                'location': (child_start, child_start),
                                'level': level,
                                'vector': create_vector('except', level, (child_start, child_start), total_lines, current_path),
                                'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                                'node_id': sub_node_id
                            })
                            processed_lines.add(child_start)
                        child_parts, child_seq = parse_node(child, lines, child_start, level + 1, total_lines, current_path, counters, processed_lines)
                        parts.extend(child_parts)
                        nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else child_start)
                    elif attr == 'finalbody':
                        if child_start not in processed_lines and not is_blank_or_comment(lines[child_start - 1]):
                            counters['finally'] += 1
                            sub_node_id = f"Finally[{counters['finally']}]"
                            parts.append({
                                'category': 'finally',
                                'source': lines[child_start - 1],
                                'location': (child_start, child_start),
                                'level': level,
                                'vector': create_vector('finally', level, (child_start, child_start), total_lines, current_path),
                                'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                                'node_id': sub_node_id
                            })
                            processed_lines.add(child_start)
                        child_parts, child_seq = parse_node(child, lines, child_start, level + 1, total_lines, current_path, counters, processed_lines)
                        parts.extend(child_parts)
                        nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else child_start)
                    else:
                        if isinstance(child, (ast.Assign, ast.AnnAssign, ast.AugAssign)):
                            for target in (child.targets if isinstance(child, ast.Assign) else [child.target]):
                                if isinstance(target, ast.Name):
                                    var_start = child.lineno
                                    if var_start not in processed_lines and not is_blank_or_comment(lines[var_start - 1]):
                                        counters['assigned_variable'] += 1
                                        var_node_id = f"AssignedVariable[{counters['assigned_variable']}]"
                                        value = get_value(child.value) if hasattr(child, 'value') else '<unknown>'
                                        parts.append({
                                            'category': 'assigned_variable',
                                            'source': lines[var_start - 1],
                                            'location': (var_start, var_start),
                                            'level': level + 1,
                                            'vector': create_vector('assigned_variable', level + 1, (var_start, var_start), total_lines, current_path),
                                            'parent_path': f"{current_path[0]} -> {var_node_id}",
                                            'node_id': var_node_id,
                                            'value': value
                                        })
                                        processed_lines.add(var_start)
                        elif isinstance(child, ast.Return):
                            for value in ast.walk(child):
                                if isinstance(value, ast.Name):
                                    var_start = child.lineno
                                    if var_start not in processed_lines and not is_blank_or_comment(lines[var_start - 1]):
                                        counters['returned_variable'] += 1
                                        var_node_id = f"ReturnedVariable[{counters['returned_variable']}]"
                                        parts.append({
                                            'category': 'returned_variable',
                                            'source': lines[var_start - 1],
                                            'location': (var_start, var_start),
                                            'level': level + 1,
                                            'vector': create_vector('returned_variable', level + 1, (var_start, var_start), total_lines, current_path),
                                            'parent_path': f"{current_path[0]} -> {var_node_id}",
                                            'node_id': var_node_id
                                        })
                                        processed_lines.add(var_start)
                        child_parts, child_seq = parse_node(child, lines, nested_prev_end, level + 1, total_lines, current_path, counters, processed_lines)
                        parts.extend(child_parts)
                        nested_prev_end = child_parts[-1]['location'][1] if child_parts else nested_prev_end
    # If children extended past the header line and the header itself was never emitted,
    # widen the last part to cover the whole span.
    if nested_prev_end > start_line and start_line not in processed_lines:
        final_end = nested_prev_end
        parts[-1]['location'] = (start_line, final_end)
        parts[-1]['source'] = ''.join(lines[start_line - 1:final_end])
        parts[-1]['vector'] = create_vector(category, level, (start_line, final_end), total_lines, current_path)
        processed_lines.update(range(start_line, final_end + 1))
    return parts, []
def parse_python_code(code):
    lines = code.splitlines(keepends=True)
    total_lines = len(lines)
    try:
        tree = ast.parse(code)
    except SyntaxError:
        return ([{'category': 'error', 'source': 'Invalid Python code', 'location': (1, 1), 'level': 0,
                  'vector': [0, 0, 1.0, 0.0, 0, 0], 'parent_path': 'Top-Level', 'node_id': 'Error[1]'}], ['error'])
    parts = []
    prev_end = 0
    processed_lines = set()
    # Share one counters dict across all top-level statements so node IDs stay unique.
    counters = {cat: 0 for cat in CATEGORIES}
    for stmt in tree.body:
        stmt_parts, _ = parse_node(stmt, lines, prev_end, total_lines=total_lines, counters=counters, processed_lines=processed_lines)
        parts.extend(stmt_parts)
        prev_end = stmt_parts[-1]['location'][1] if stmt_parts else prev_end
    # Collect variable definitions/uses after parse_node has attached node_id to the AST nodes.
    var_defs, var_uses = collect_variable_usage(tree)
    for part in parts:
        part['var_defs'] = var_defs
        part['var_uses'] = var_uses
    # Trailing blank/comment lines after the last statement become spacer parts,
    # numbered with the same shared counter as the spacers emitted inside parse_node.
    if prev_end < total_lines:
        for i, line in enumerate(lines[prev_end:], prev_end + 1):
            if i not in processed_lines and is_blank_or_comment(line):
                counters['spacer'] += 1
                spacer_node_id = f"Spacer[{counters['spacer']}]"
                parts.append({
                    'category': 'spacer',
                    'source': line,
                    'location': (i, i),
                    'level': 0,
                    'vector': create_vector('spacer', 0, (i, i), total_lines, []),
                    'parent_path': 'Top-Level',
                    'node_id': spacer_node_id
                })
                processed_lines.add(i)
    return parts, []
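# Example usage: a minimal sketch that runs the parser on a small sample snippet
# (the snippet below is illustrative only) and prints each extracted part's id,
# category, line span, and vector.
if __name__ == '__main__':
    sample_code = (
        "import os\n"
        "\n"
        "def add(a, b):\n"
        "    total = a + b\n"
        "    return total\n"
    )
    sample_parts, _ = parse_python_code(sample_code)
    for p in sample_parts:
        print(f"{p['node_id']:<20} {p['category']:<18} lines {p['location']} vector={p['vector']}")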