Update analyzer.py
analyzer.py CHANGED (+8 -2)
@@ -129,7 +129,7 @@ def aggregate_chunk_analyses(chunk_jsons: list) -> str:
 def analyze_combined_file(output_file="combined_repo.txt"):
     """
     Reads the combined file, splits it into 500-line chunks, analyzes each chunk, and aggregates the LLM's output into a final summary.
-    Returns the aggregated analysis as a string.
+    Returns the chunk JSONs (for debugging) and the aggregated analysis as a string.
     """
     try:
         with open(output_file, "r", encoding="utf-8") as f:
@@ -141,6 +141,12 @@ def analyze_combined_file(output_file="combined_repo.txt"):
             analysis = analyze_code_chunk(chunk)
             chunk_jsons.append(analysis)
         final_summary = aggregate_chunk_analyses(chunk_jsons)
-
+        debug_output = (
+            "==== Chunk JSON Outputs ===="
+            + "\n\n".join([f"Chunk {i+1} JSON:\n{chunk_jsons[i]}" for i in range(len(chunk_jsons))])
+            + "\n\n==== Final Aggregated Summary ===="
+            + f"\n{final_summary}"
+        )
+        return debug_output
     except Exception as e:
         return f"Error analyzing combined file: {e}"
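With this change, analyze_combined_file no longer returns only the aggregated summary; it folds the raw per-chunk JSON outputs and the final summary into one debug string. Below is a minimal usage sketch of how a caller might consume the new return value. It assumes analyzer.py is importable and that combined_repo.txt has already been produced by the rest of the pipeline, and the split-marker trick relies on the "====" headers introduced in this commit staying unchanged; the caller code itself is illustrative, not part of the commit.

# Hypothetical caller, not part of analyzer.py.
from analyzer import analyze_combined_file

# With this commit the return value is one string: per-chunk JSON outputs,
# then the aggregated summary, separated by the "====" headers built in
# debug_output (or an error message if the except branch was hit).
report = analyze_combined_file("combined_repo.txt")
print(report)

# If only the final summary is wanted, the new header can serve as a split
# marker; when the marker is absent (e.g. on error), the full string is kept.
marker = "==== Final Aggregated Summary ===="
summary = report.split(marker, 1)[-1].strip()
print(summary)

Two small notes on the committed construction: iterating with enumerate(chunk_jsons) would be slightly more idiomatic than range(len(chunk_jsons)), and returning the chunk JSONs and the summary as separate values (a tuple or dict) would spare callers the string parsing shown above, though concatenating everything into one string is convenient for dumping straight into a single text output widget.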