import ast
from langchain.schema import Document


def chunk_python_source_code(source_codes, references):
    """
    Entry point for processing Python source files.
    Iterates over the given source strings and their references,
    invoking iterate_ast for each pair.
    """
    print(f"Processing files: {references}")
    for source_code, reference in zip(source_codes, references):
        print(f"Processing text: {source_code} with reference: {reference}")
        iterate_ast(source_code, reference)


def iterate_ast(source_code, reference):
    """
    Parses the given source code into an AST and delegates
    handling to specific methods based on first-level node types.
    """
# Parse the source code into an abstract syntax tree (AST)
tree = ast.parse(source_code, filename=reference)
# Gather all top-level imports for later use
imports_dict = extract_imports(tree)
# Iterate over first-level nodes
for first_level_node in ast.iter_child_nodes(tree):
if isinstance(first_level_node, ast.ClassDef):
handle_first_level_class(first_level_node, source_code, imports_dict)
elif isinstance(first_level_node, ast.FunctionDef):
handle_first_level_func(first_level_node, source_code, imports_dict)
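
# Note: only first-level ClassDef and FunctionDef nodes are chunked by the
# loop above; other top-level statements (imports, assignments, async
# functions) are not handled here.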


def extract_imports(tree):
    """
    Extracts all import statements from the AST and organizes them into a
    dictionary that maps each locally bound name (alias or imported name)
    to its fully qualified name for later analysis.
    """
    imports_dict = {}
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                # Key by the alias if present (e.g. "np" for "import numpy as np")
                imports_dict[alias.asname or alias.name] = alias.name
        elif isinstance(node, ast.ImportFrom):
            module = node.module if node.module else ""
            for alias in node.names:
                full_name = f"{module}.{alias.name}" if module else alias.name
                imports_dict[alias.asname or alias.name] = full_name
    return imports_dict
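
# Illustrative example (not part of the original file): for a module containing
#     import os
#     import numpy as np
#     from collections import OrderedDict as OD
# extract_imports would return
#     {"os": "os", "np": "numpy", "OD": "collections.OrderedDict"}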


def analyze_imports(node, imports_dict):
"""
Analyzes the given node's body and signature to find relevant imports.
"""
relevant_imports = set()
for sub_node in ast.walk(node):
if isinstance(sub_node, ast.Name) and sub_node.id in imports_dict:
relevant_imports.add(imports_dict[sub_node.id])
return list(relevant_imports)
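
# Illustrative example (not part of the original file): with numpy imported as
# "np", a function whose body contains "np.zeros(3)" yields ["numpy"], because
# the ast.Name node "np" resolves through imports_dict to its qualified name.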


def handle_first_level_class(class_node, source_code, imports_dict):
"""
Handles classes at the first level of the AST by processing
the class and its methods. Stores each class method in a Document object.
"""
print(f"Class detected: {class_node.name}")
# Extract relevant imports for this class
class_imports = analyze_imports(class_node, imports_dict)
# Extract the class source code
class_start_line = class_node.lineno
class_end_line = max(
[n.end_lineno for n in ast.walk(class_node) if hasattr(n, "end_lineno")], default=class_node.lineno
)
class_source = '\n'.join(source_code.splitlines()[class_start_line - 1:class_end_line])
# Store the class-level Document
class_doc = Document(
page_content=class_source,
metadata={
"type": "class",
"class_name": class_node.name,
"imports": class_imports
}
)
print(f"Stored Class Document: {class_doc}\n")
# Process methods within the class
for second_level_node in ast.iter_child_nodes(class_node):
if isinstance(second_level_node, ast.FunctionDef):
handle_class_method(second_level_node, class_node.name, source_code, imports_dict)


def handle_class_method(method_node, class_name, source_code, imports_dict):
"""
Handles methods within a class by storing them in a Document object.
"""
print(f"Method detected: {method_node.name} in class {class_name}")
# Extract method source code
method_start_line = (
method_node.decorator_list[0].lineno
if method_node.decorator_list else method_node.lineno
)
method_end_line = method_node.end_lineno
method_source = '\n'.join(source_code.splitlines()[method_start_line - 1:method_end_line])
# Determine visibility metadata
visibility = "internal" if method_node.name.startswith("_") else "public"
# Extract relevant imports for this method
method_imports = analyze_imports(method_node, imports_dict)
# Store the method-level Document
method_doc = Document(
page_content=method_source,
metadata={
"type": "method",
"class_name": class_name,
"method_name": method_node.name,
"visibility": visibility,
"imports": method_imports
}
)
print(f"Stored Method Document: {method_doc}\n")


def handle_first_level_func(function_node, source_code, imports_dict):
"""
Handles functions at the first level of the AST by storing them
in a Document object with metadata, including relevant imports.
"""
print(f"Function detected: {function_node.name}")
# Extract function source code
function_start_line = (
function_node.decorator_list[0].lineno
if function_node.decorator_list else function_node.lineno
)
function_end_line = function_node.end_lineno
function_source = '\n'.join(source_code.splitlines()[function_start_line - 1:function_end_line])
# Determine visibility metadata
visibility = "internal" if function_node.name.startswith("_") else "public"
# Extract relevant imports for this function
function_imports = analyze_imports(function_node, imports_dict)
# Store the function-level Document
function_doc = Document(
page_content=function_source,
metadata={
"type": "function",
"function_name": function_node.name,
"visibility": visibility,
"imports": function_imports
}
)
print(f"Stored Function Document: {function_doc}\n") |