Update parser.py
parser.py CHANGED
@@ -1,332 +1,134 @@
 import ast
-
-
-def get_category(node, parent):
-    """
-
-
-
-
-
-
-
-        return 'if'
-    elif isinstance(node, ast.While):
-        return 'while'
-    elif isinstance(node, ast.For):
-        return 'for'
-    elif isinstance(node, ast.Try):
-        return 'try'
-    elif isinstance(node, ast.Return):
-        return 'return'
-    elif isinstance(node, ast.Expr):
-        return 'expression'
-    elif isinstance(node, ast.ExceptHandler):
-        return 'except'
-    elif isinstance(node, (ast.Assign, ast.AnnAssign, ast.AugAssign)):
-        if parent and isinstance(parent, (ast.FunctionDef, ast.AsyncFunctionDef, ast.If, ast.Try, ast.While, ast.For)):
-            return 'assigned_variable'
-    elif isinstance(node, ast.arg):
-        if parent and isinstance(parent, (ast.FunctionDef, ast.AsyncFunctionDef)):
-            return 'input_variable'
-    elif isinstance(node, ast.Name):
-        if parent and isinstance(parent, ast.Return):
-            return 'returned_variable'
-    return 'other'
-
-def get_value(node):
-    """Extract the value of an AST node (e.g., for assignment)."""
-    if isinstance(node, ast.Constant):
-        return str(node.value)
-    elif isinstance(node, ast.Name):
-        return node.id
-    elif isinstance(node, ast.BinOp):
-        return '<expression>'
-    elif isinstance(node, ast.Call):
-        return '<function_call>'
-    return '<complex>'
-def is_blank_or_comment(line):
-    """Check if a line is blank or a comment."""
-    stripped = line.strip()
-    return not stripped or stripped.startswith('#')
+import hashlib
+
+def get_category_id(category):
+    """Maps categorical roles to integers for vector embedding."""
+    mapping = {
+        'unknown': 0, 'import': 1, 'function': 2, 'class': 3,
+        'if': 4, 'while': 5, 'for': 6, 'try': 7, 'expression': 8,
+        'spacer': 9, 'elif': 10, 'else': 11, 'except': 12,
+        'return': 13, 'assigned_variable': 14, 'variable_def': 15
+    }
+    return mapping.get(category, 0)

 def create_vector(category, level, location, total_lines, parent_path):
-    """
-
-
-
-
-
-
-
-
-    span = (
-
+    """
+    Creates a 6D normalized vector:
+    [Category, Depth, RelativeCenter, Density, ParentDepth, AncestryWeight]
+    """
+    cat_id = get_category_id(category)
+    start, end = location
+    total_lines = max(1, total_lines)
+
+    # metrics
+    span = (end - start + 1) / total_lines
+    center = ((start + end) / 2) / total_lines
     parent_depth = len(parent_path)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            current_scope = node.name
-            for arg in node.args.args:
-                var_defs[arg.arg].append((f"InputVariable[{arg.arg}]", current_scope))
-            for body_node in node.body:
-                traverse(body_node, current_scope)
-            scope_stack.pop()
-        elif isinstance(node, (ast.Assign, ast.AnnAssign)):
-            value = get_value(node.value) if hasattr(node, 'value') else '<unknown>'
-            for target in (node.targets if isinstance(node, ast.Assign) else [node.target]):
-                if isinstance(target, ast.Name):
-                    var_defs[target.id].append((node_id, current_scope))
-            # Parse value for uses
-            for child in ast.walk(node.value):
-                if isinstance(child, ast.Name):
-                    var_uses[child.id].append((node_id, current_scope))
-        elif isinstance(node, ast.Name) and isinstance(node.ctx, ast.Load):
-            var_uses[node.id].append((node_id, current_scope))
-        for child in ast.iter_child_nodes(node):
-            traverse(child, current_scope)
-
-    for node in tree.body:
-        node_id = getattr(node, 'node_id', None)
-        if node_id:
-            setattr(node, 'node_id', node_id)
-        traverse(node, 'global')
-
-    return var_defs, var_uses
-
-def parse_node(node, lines, prev_end, level=0, total_lines=None, parent_path=None, counters=None, processed_lines=None):
-    if total_lines is None:
-        total_lines = len(lines)
-    if parent_path is None:
-        parent_path = []
-    if counters is None:
-        counters = {cat: 0 for cat in ['import', 'function', 'class', 'if', 'while', 'for', 'try', 'return', 'expression', 'other', 'spacer', 'elif', 'else', 'except', 'finally', 'assigned_variable', 'input_variable', 'returned_variable']}
-    if processed_lines is None:
-        processed_lines = set()
-
-    parts = []
-    start_line = getattr(node, 'lineno', prev_end + 1)
-    end_line = getattr(node, 'end_lineno', start_line)
-
-    if any(line in processed_lines for line in range(start_line, end_line + 1)):
-        return parts, []
-
-    category = get_category(node, parent_path[-1] if parent_path else None) or 'other'
-    if category not in counters:
-        category = 'other'
-    counters[category] += 1
-    node_id = f"{category.capitalize()}[{counters[category]}]"
-    setattr(node, 'node_id', node_id)  # Attach node_id to AST node
-
-    if start_line > prev_end + 1:
-        for i, line in enumerate(lines[prev_end:start_line - 1], prev_end + 1):
-            if i not in processed_lines and is_blank_or_comment(line):
-                counters['spacer'] += 1
-                spacer_node_id = f"Spacer[{counters['spacer']}]"
-                parts.append({
-                    'category': 'spacer',
-                    'source': line,
-                    'location': (i, i),
-                    'level': level,
-                    'vector': create_vector('spacer', level, (i, i), total_lines, parent_path),
-                    'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
-                    'node_id': spacer_node_id
-                })
-                processed_lines.add(i)
-
-    current_path = parent_path + [node_id]
-    if start_line not in processed_lines and not is_blank_or_comment(lines[start_line - 1]):
-        part = {
-            'category': category,
-            'source': lines[start_line - 1],
-            'location': (start_line, start_line),
-            'level': level,
-            'vector': create_vector(category, level, (start_line, start_line), total_lines, current_path),
-            'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
-            'node_id': node_id
-        }
-        if category == 'assigned_variable':
-            part['value'] = get_value(node.value) if hasattr(node, 'value') else '<unknown>'
-        parts.append(part)
-        processed_lines.add(start_line)
-
-    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)) and node.args.args:
-        for arg in node.args.args:
-            var_start = start_line
-            if var_start not in processed_lines:
-                arg_category = 'input_variable'
-                counters[arg_category] += 1
-                var_node_id = f"InputVariable[{counters[arg_category]}]"
-                parts.append({
-                    'category': arg_category,
-                    'source': f" {arg.arg},",
-                    'location': (var_start, var_start),
-                    'level': level + 1,
-                    'vector': create_vector(arg_category, level + 1, (var_start, var_start), total_lines, current_path),
-                    'parent_path': f"{current_path[0]} -> {var_node_id}",
-                    'node_id': var_node_id
-                })
-                processed_lines.add(var_start)
-
-    nested_prev_end = start_line
-    for attr in ('body', 'orelse', 'handlers', 'finalbody'):
-        if hasattr(node, attr) and getattr(node, attr):
-            for child in getattr(node, attr):
-                child_start = getattr(child, 'lineno', nested_prev_end + 1)
-                child_end = getattr(child, 'end_lineno', child_start)
-                if not any(line in processed_lines for line in range(child_start, child_end + 1)):
-                    if attr == 'orelse' and isinstance(node, ast.If) and child_start != start_line:
-                        sub_category = 'elif' if 'elif' in lines[child_start - 1] else 'else'
-                        if child_start not in processed_lines and not is_blank_or_comment(lines[child_start - 1]):
-                            counters[sub_category] += 1
-                            sub_node_id = f"{sub_category.capitalize()}[{counters[sub_category]}]"
-                            parts.append({
-                                'category': sub_category,
-                                'source': lines[child_start - 1],
-                                'location': (child_start, child_start),
-                                'level': level,
-                                'vector': create_vector(sub_category, level, (child_start, child_start), total_lines, current_path),
-                                'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
-                                'node_id': sub_node_id
-                            })
-                            processed_lines.add(child_start)
-                        child_parts, child_seq = parse_node(child, lines, child_start, level + 1, total_lines, current_path, counters, processed_lines)
-                        parts.extend(child_parts)
-                        nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else child_start)
-                    elif attr == 'handlers' and isinstance(child, ast.ExceptHandler):
-                        if child_start not in processed_lines and not is_blank_or_comment(lines[child_start - 1]):
-                            counters['except'] += 1
-                            sub_node_id = f"Except[{counters['except']}]"
-                            parts.append({
-                                'category': 'except',
-                                'source': lines[child_start - 1],
-                                'location': (child_start, child_start),
-                                'level': level,
-                                'vector': create_vector('except', level, (child_start, child_start), total_lines, current_path),
-                                'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
-                                'node_id': sub_node_id
-                            })
-                            processed_lines.add(child_start)
-                        child_parts, child_seq = parse_node(child, lines, child_start, level + 1, total_lines, current_path, counters, processed_lines)
-                        parts.extend(child_parts)
-                        nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else child_start)
-                    elif attr == 'finalbody':
-                        if child_start not in processed_lines and not is_blank_or_comment(lines[child_start - 1]):
-                            counters['finally'] += 1
-                            sub_node_id = f"Finally[{counters['finally']}]"
-                            parts.append({
-                                'category': 'finally',
-                                'source': lines[child_start - 1],
-                                'location': (child_start, child_start),
-                                'level': level,
-                                'vector': create_vector('finally', level, (child_start, child_start), total_lines, current_path),
-                                'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
-                                'node_id': sub_node_id
-                            })
-                            processed_lines.add(child_start)
-                        child_parts, child_seq = parse_node(child, lines, child_start, level + 1, total_lines, current_path, counters, processed_lines)
-                        parts.extend(child_parts)
-                        nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else child_start)
-                    else:
-                        if isinstance(child, (ast.Assign, ast.AnnAssign, ast.AugAssign)):
-                            for target in (child.targets if isinstance(child, ast.Assign) else [child.target]):
-                                if isinstance(target, ast.Name):
-                                    var_start = child.lineno
-                                    if var_start not in processed_lines and not is_blank_or_comment(lines[var_start - 1]):
-                                        counters['assigned_variable'] += 1
-                                        var_node_id = f"AssignedVariable[{counters['assigned_variable']}]"
-                                        value = get_value(child.value) if hasattr(child, 'value') else '<unknown>'
-                                        parts.append({
-                                            'category': 'assigned_variable',
-                                            'source': lines[var_start - 1],
-                                            'location': (var_start, var_start),
-                                            'level': level + 1,
-                                            'vector': create_vector('assigned_variable', level + 1, (var_start, var_start), total_lines, current_path),
-                                            'parent_path': f"{current_path[0]} -> {var_node_id}",
-                                            'node_id': var_node_id,
-                                            'value': value
-                                        })
-                                        processed_lines.add(var_start)
-                        elif isinstance(child, ast.Return):
-                            for value in ast.walk(child):
-                                if isinstance(value, ast.Name):
-                                    var_start = child.lineno
-                                    if var_start not in processed_lines and not is_blank_or_comment(lines[var_start - 1]):
-                                        counters['returned_variable'] += 1
-                                        var_node_id = f"ReturnedVariable[{counters['returned_variable']}]"
-                                        parts.append({
-                                            'category': 'returned_variable',
-                                            'source': lines[var_start - 1],
-                                            'location': (var_start, var_start),
-                                            'level': level + 1,
-                                            'vector': create_vector('returned_variable', level + 1, (var_start, var_start), total_lines, current_path),
-                                            'parent_path': f"{current_path[0]} -> {var_node_id}",
-                                            'node_id': var_node_id
-                                        })
-                                        processed_lines.add(var_start)
-                        child_parts, child_seq = parse_node(child, lines, nested_prev_end, level + 1, total_lines, current_path, counters, processed_lines)
-                        parts.extend(child_parts)
-                        nested_prev_end = child_parts[-1]['location'][1] if child_parts else nested_prev_end
-
-    if nested_prev_end > start_line and start_line not in processed_lines:
-        final_end = nested_prev_end
-        if start_line not in processed_lines:
-            parts[-1]['location'] = (start_line, final_end)
-            parts[-1]['source'] = ''.join(lines[start_line - 1:final_end])
-            parts[-1]['vector'] = create_vector(category, level, (start_line, final_end), total_lines, current_path)
-            processed_lines.update(range(start_line, final_end + 1))
-
-    return parts, []
-
-def parse_python_code(code):
-    lines = code.splitlines(keepends=True)
-    total_lines = len(lines)
+
+    # Ancestry weight: Simple hash sum of parent IDs to represent unique path
+    path_str = "".join(parent_path)
+    parent_weight = (int(hashlib.md5(path_str.encode()).hexdigest(), 16) % 100) / 100.0
+
+    return [
+        cat_id,
+        level,
+        float(f"{center:.4f}"),
+        float(f"{span:.4f}"),
+        parent_depth,
+        float(f"{parent_weight:.4f}")
+    ]
+
+def parse_source_to_graph(code):
     try:
         tree = ast.parse(code)
-    except SyntaxError:
-        return
+    except SyntaxError as e:
+        return {"error": f"Syntax Error on line {e.lineno}: {e.msg}"}

-
-
-
-
-
-
-
-
-
-
-
-
-
+    lines = code.splitlines(keepends=True)
+    total_lines = len(lines)
+    nodes = []
+
+    # Recursive visitor
+    def traverse(node, parent_path=[], level=0, parent_id=None):
+        category = 'other'
+        name = getattr(node, 'name', None)
+        # Unique Node ID based on position to ensure consistency
+        node_id = f"{type(node).__name__}_{getattr(node, 'lineno', 0)}_{getattr(node, 'col_offset', 0)}"
+
+        # Categorization logic
+        if isinstance(node, (ast.Import, ast.ImportFrom)): category = 'import'; name = "import"
+        elif isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)): category = 'function'
+        elif isinstance(node, ast.ClassDef): category = 'class'
+        elif isinstance(node, ast.If): category = 'if'; name = "if"
+        elif isinstance(node, (ast.For, ast.AsyncFor)): category = 'for'; name = "for"
+        elif isinstance(node, ast.While): category = 'while'; name = "while"
+        elif isinstance(node, ast.Return): category = 'return'; name = "return"
+        elif isinstance(node, (ast.Assign, ast.AnnAssign)): category = 'assigned_variable'; name = "assignment"
+        elif isinstance(node, ast.Expr): category = 'expression'; name = "expr"
+        elif isinstance(node, ast.Try): category = 'try'; name = "try"
+        elif isinstance(node, ast.ExceptHandler): category = 'except'; name = "except"
+
+        lineno = getattr(node, 'lineno', 0)
+        end_lineno = getattr(node, 'end_lineno', lineno)
+
+        if lineno == 0: return  # Skip nodes without line numbers (e.g. Load context)
+
+        # Create source snippet
+        source_segment = "".join(lines[lineno-1:end_lineno])
+
+        # Determine Label
+        label = name if name else category
+        if category == 'assigned_variable':
+            targets = getattr(node, 'targets', []) or [getattr(node, 'target', None)]
+            if targets and isinstance(targets[0], ast.Name):
+                label = f"{targets[0].id} ="
+
+        vector = create_vector(category, level, (lineno, end_lineno), total_lines, parent_path)
+
+        node_data = {
+            "id": node_id,
+            "label": label,
+            "type": category,
+            "source": source_segment.strip(),
+            "vector": vector,
+            "level": level,
+            "lineno": lineno,
+            "parent_id": parent_id
+        }
+
+        # Filter: Only visualize structural elements (skip raw expressions unless useful)
+        if category != 'other':
+            nodes.append(node_data)
+            current_path = parent_path + [node_id]
+            current_parent = node_id
+            next_level = level + 1
+        else:
+            current_path = parent_path
+            current_parent = parent_id
+            next_level = level

-
-
-        if i not in processed_lines and is_blank_or_comment(line):
-            counters = {'spacer': 0}
-            counters['spacer'] += 1
-            spacer_node_id = f"Spacer[{counters['spacer']}]"
-            parts.append({
-                'category': 'spacer',
-                'source': line,
-                'location': (i, i),
-                'level': 0,
-                'vector': create_vector('spacer', 0, (i, i), total_lines, []),
-                'parent_path': 'Top-Level',
-                'node_id': spacer_node_id
-            })
-            processed_lines.add(i)
+        for child in ast.iter_child_nodes(node):
+            traverse(child, current_path, next_level, current_parent)

-
+    for node in tree.body:
+        traverse(node)
+
+    # Sort by line number for linear visual flow
+    nodes.sort(key=lambda x: x['lineno'])
+
+    return {"nodes": nodes, "connections": generate_connections(nodes)}
+
+def generate_connections(nodes):
+    connections = []
+    node_map = {n['id']: n for n in nodes}
+
+    for node in nodes:
+        # 1. Structural Hierarchy (Tree)
+        if node['parent_id'] and node['parent_id'] in node_map:
+            connections.append({
+                "from": node['parent_id'],
+                "to": node['id'],
+                "type": "hierarchy"
+            })
+
+    return connections
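
For reference, a minimal usage sketch of the new API added in this commit. The sample source string, the `parser` import path, and the printing loop are illustrative assumptions, not part of the change itself; the vector layout comment follows the docstring of create_vector.

# Illustrative only: exercise parse_source_to_graph from the updated parser.py.
from parser import parse_source_to_graph  # assumes the file is saved as parser.py

sample = (
    "import math\n"
    "\n"
    "def area(r):\n"
    "    if r < 0:\n"
    "        return 0\n"
    "    return math.pi * r ** 2\n"
)

result = parse_source_to_graph(sample)
if "error" in result:
    # Syntax errors are reported instead of raising.
    print(result["error"])
else:
    for node in result["nodes"]:
        # vector: [category_id, depth, relative_center, density, parent_depth, ancestry_weight]
        print(node["label"], node["type"], node["vector"])
    for edge in result["connections"]:
        # hierarchy edges link each node to its structural parent
        print(edge["from"], "->", edge["to"])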