import logging
import json

from rule_extractor import get_rules_from_url, format_rules_for_display
from doc_analyzer import analyze_document

logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')


def combine_rules(url_rules, pasted_rules):
    """Combine URL-extracted rules and manually pasted rules"""
    combined_rules = ""

    # If the URL rules look like JSON, parse them and convert to display format.
    if url_rules and (url_rules.strip().startswith('[') or url_rules.strip().startswith('{')):
        try:
            rules_data = json.loads(url_rules)
            if isinstance(rules_data, list) and len(rules_data) > 0:
                rules_data = rules_data[0]
            url_rules = format_rules_for_display(rules_data)
        except Exception as e:
            logging.error(f"Failed to parse URL rules as JSON: {e}")

    if url_rules:
        combined_rules += url_rules

    if pasted_rules:
        if url_rules:
            combined_rules += "\n\n## Additional Manually Pasted Rules\n\n" + pasted_rules
        else:
            combined_rules = "# Manually Pasted Rules\n\n" + pasted_rules

    return combined_rules


def extract_rules(journal_url):
    """Extract formatting rules from a given URL."""
    try:
        logging.info(f"Extracting rules from URL: {journal_url}")
        rules = get_rules_from_url(journal_url)
        logging.info("Successfully extracted rules from URL.")
        return rules
    except Exception as e:
        logging.error(f"Error extracting rules from URL: {e}")
        return {"error": f"Failed to extract rules from URL: {e}"}


def analyze_uploaded_document(uploaded_file, rules):
    """Analyze the uploaded document against the provided rules."""
    try:
        logging.info(f"Analyzing document: {uploaded_file.name}")
        results = analyze_document(uploaded_file, rules)
        logging.info("Successfully analyzed document.")
        return results
    except Exception as e:
        logging.error(f"Error analyzing document: {e}")
        return {"error": f"Failed to analyze document: {e}"}
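

# Example usage (a minimal sketch, not part of the original module): it assumes
# `get_rules_from_url` returns a plain string (Markdown or JSON text), which is
# what `combine_rules` tolerates above. The URL and the pasted-rules text are
# hypothetical placeholders for illustration only.
if __name__ == "__main__":
    journal_url = "https://example.com/author-guidelines"
    pasted_rules = "Figures must be submitted as 300 dpi TIFF files."

    url_rules = extract_rules(journal_url)
    if isinstance(url_rules, dict) and "error" in url_rules:
        # Extraction failed; fall back to the manually pasted rules only.
        logging.warning("Falling back to pasted rules only: %s", url_rules["error"])
        url_rules = ""

    combined = combine_rules(url_rules, pasted_rules)
    print(combined)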