import ast

from langchain.schema import Document

def chunk_python_source_code(source_code, references):
    """
    Entry point that processes a batch of Python source texts,
    one per reference. It invokes iterate_ast for each pair.
    """
    print(f"Processing files: {references}")
    for code, reference in zip(source_code, references):
        print(f"Processing text: {code} with reference: {reference}")
        iterate_ast(code, reference)

def iterate_ast(source_code, reference):
    """
    Parses the AST of the given Python file and delegates
    handling to specific methods based on node types.
    """
    # Parse the source code into an abstract syntax tree (AST)
    tree = ast.parse(source_code, filename=reference)

    # Gather all top-level imports for later use
    imports_dict = extract_imports(tree)

    # Iterate over first-level nodes
    for first_level_node in ast.iter_child_nodes(tree):
        if isinstance(first_level_node, ast.ClassDef):
            handle_first_level_class(first_level_node, source_code, imports_dict)
        elif isinstance(first_level_node, ast.FunctionDef):
            handle_first_level_func(first_level_node, source_code, imports_dict)

def extract_imports(tree):
    """
    Extracts all import statements from the AST tree and organizes them
    into a dictionary that maps each name bound in the module (including
    aliases) to its fully qualified name for later analysis.
    """
    imports_dict = {}
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                # Key by the local binding (the alias if present) so lookups
                # match the names actually used in the code body.
                imports_dict[alias.asname or alias.name] = alias.name
        elif isinstance(node, ast.ImportFrom):
            module = node.module if node.module else ""
            for alias in node.names:
                full_name = f"{module}.{alias.name}" if module else alias.name
                imports_dict[alias.asname or alias.name] = full_name
    return imports_dict
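
# Illustrative note (assumption, not part of the original module): for a file containing
#     import os
#     from collections import OrderedDict
# extract_imports returns {"os": "os", "OrderedDict": "collections.OrderedDict"},
# which analyze_imports then matches against the names used in each node's body.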

def analyze_imports(node, imports_dict):
    """
    Analyzes the given node's body and signature to find relevant imports.
    """
    relevant_imports = set()
    for sub_node in ast.walk(node):
        if isinstance(sub_node, ast.Name) and sub_node.id in imports_dict:
            relevant_imports.add(imports_dict[sub_node.id])
    return list(relevant_imports)

def handle_first_level_class(class_node, source_code, imports_dict):
    """
    Handles classes at the first level of the AST: stores the class itself
    in a Document object and delegates each of its methods to handle_class_method.
    """
    print(f"Class detected: {class_node.name}")

    # Extract relevant imports for this class
    class_imports = analyze_imports(class_node, imports_dict)

    # Extract the class source code
    class_start_line = class_node.lineno
    class_end_line = max(
        [n.end_lineno for n in ast.walk(class_node) if hasattr(n, "end_lineno")],
        default=class_node.lineno,
    )
    class_source = '\n'.join(source_code.splitlines()[class_start_line - 1:class_end_line])

    # Store the class-level Document
    class_doc = Document(
        page_content=class_source,
        metadata={
            "type": "class",
            "class_name": class_node.name,
            "imports": class_imports
        }
    )
    print(f"Stored Class Document: {class_doc}\n")

    # Process methods within the class
    for second_level_node in ast.iter_child_nodes(class_node):
        if isinstance(second_level_node, ast.FunctionDef):
            handle_class_method(second_level_node, class_node.name, source_code, imports_dict)

def handle_class_method(method_node, class_name, source_code, imports_dict):
    """
    Handles methods within a class by storing them in a Document object.
    """
    print(f"Method detected: {method_node.name} in class {class_name}")

    # Extract method source code (including decorators when present)
    method_start_line = (
        method_node.decorator_list[0].lineno
        if method_node.decorator_list else method_node.lineno
    )
    method_end_line = method_node.end_lineno
    method_source = '\n'.join(source_code.splitlines()[method_start_line - 1:method_end_line])

    # Determine visibility metadata
    visibility = "internal" if method_node.name.startswith("_") else "public"

    # Extract relevant imports for this method
    method_imports = analyze_imports(method_node, imports_dict)

    # Store the method-level Document
    method_doc = Document(
        page_content=method_source,
        metadata={
            "type": "method",
            "class_name": class_name,
            "method_name": method_node.name,
            "visibility": visibility,
            "imports": method_imports
        }
    )
    print(f"Stored Method Document: {method_doc}\n")

def handle_first_level_func(function_node, source_code, imports_dict):
    """
    Handles functions at the first level of the AST by storing them
    in a Document object with metadata, including relevant imports.
    """
    print(f"Function detected: {function_node.name}")

    # Extract function source code (including decorators when present)
    function_start_line = (
        function_node.decorator_list[0].lineno
        if function_node.decorator_list else function_node.lineno
    )
    function_end_line = function_node.end_lineno
    function_source = '\n'.join(source_code.splitlines()[function_start_line - 1:function_end_line])

    # Determine visibility metadata
    visibility = "internal" if function_node.name.startswith("_") else "public"

    # Extract relevant imports for this function
    function_imports = analyze_imports(function_node, imports_dict)

    # Store the function-level Document
    function_doc = Document(
        page_content=function_source,
        metadata={
            "type": "function",
            "function_name": function_node.name,
            "visibility": visibility,
            "imports": function_imports
        }
    )
    print(f"Stored Function Document: {function_doc}\n")