import json
import logging
from datetime import datetime

from flask import Flask, jsonify
from apscheduler.schedulers.background import BackgroundScheduler

from db import paires, deeper
import ai

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

app = Flask(__name__)

def fetch_pairs_configuration():
    """Fetch the current pairs configuration from GitHub."""
    result = paires.fetch_json_from_github()

    if result["success"]:
        config = result["data"]
        if not config:
            config = {"status": False, "GROUPS": {}}
        return config
    else:
        logger.error(f"Error fetching pairs configuration: {result['message']}")
        return {"status": False, "GROUPS": {}}

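
# NOTE (illustrative): judging by how analyze_forex_groups() reads it below, the
# pairs configuration is assumed to look roughly like:
# {
#     "status": true,
#     "GROUPS": {
#         "group_1": {
#             "pairs": ["EURUSD", "GBPUSD"],
#             "description": "...",
#             "relationships": "..."
#         }
#     }
# }
# The authoritative schema is whatever db.paires serves from GitHub; the group
# name and pairs above are placeholders.
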
def fetch_deeper_configuration():
    """Fetch the current deeper analysis configuration from GitHub."""
    result = deeper.fetch_json_from_github()

    if result["success"]:
        config = result["data"]
        if not config:
            config = {"status": True, "forwards": {}}
        return config
    else:
        logger.error(f"Error fetching deeper configuration: {result['message']}")
        return {"status": True, "forwards": {}}

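
# NOTE (illustrative): based on how forwards are assembled in analyze_forex_groups(),
# the deeper-analysis configuration is assumed to look roughly like:
# {
#     "status": true,
#     "forwards": {
#         "group_1": {"pairs": ["EURUSD", "GBPUSD"], "message": "<forward-looking text>"}
#     }
# }
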
def save_deeper_configuration(config):
    """Save the deeper configuration to GitHub."""
    formatted_json = json.dumps(config, separators=(',', ':'))

    auth_token, commit_oid = deeper.fetch_authenticity_token_and_commit_oid()

    if auth_token and commit_oid:
        result = deeper.update_user_json_file(auth_token, commit_oid, formatted_json)
        if result["success"]:
            logger.info("Deeper analysis saved successfully!")
            return True
        else:
            logger.error(f"Error saving deeper analysis: {result['message']}")
            return False
    else:
        logger.error("Failed to get authentication token or commit ID. Deeper analysis not saved.")
        return False

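
# Saving goes through db.deeper, which first needs a fresh authenticity token and
# the latest commit OID for the target file before it can push the updated JSON.
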
def extract_forward_content(analysis_text):
    """Extract content between <Forward> and </Forward> tags."""
    start_tag = "<Forward>"
    end_tag = "</Forward>"

    start_index = analysis_text.find(start_tag)
    if start_index == -1:
        return None

    start_index += len(start_tag)
    end_index = analysis_text.find(end_tag, start_index)

    if end_index == -1:
        return None

    return analysis_text[start_index:end_index].strip()

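
# Example (illustrative): for an AI response such as
#   "Session overview... <Forward>EURUSD may retest recent highs this week.</Forward>"
# extract_forward_content() returns "EURUSD may retest recent highs this week.";
# it returns None when no <Forward>...</Forward> block is present.
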

def analyze_forex_groups():
    """Main function to analyze forex groups and update the deeper analysis."""
    logger.info("Starting forex group analysis...")

    try:
        pairs_config = fetch_pairs_configuration()

        if not pairs_config.get("status", False):
            logger.info("Trading is currently disabled. No analysis will be performed.")
            return

        groups = pairs_config.get("GROUPS", {})
        if not groups:
            logger.info("No groups configured for analysis.")
            return

        new_forwards = {}

        for group_id, group_data in groups.items():
            logger.info(f"Analyzing group {group_id}...")
            try:
                pairs = group_data.get("pairs", [])
                description = group_data.get("description", "")
                relationships = group_data.get("relationships", "")

                if not pairs:
                    logger.warning(f"No pairs defined for group {group_id}, skipping.")
                    continue

                analysis = ai.analyze_forex_group(pairs, description, relationships)
                logger.info(f"Analysis received for group {group_id}")

                forward_content = extract_forward_content(analysis)

                if forward_content:
                    logger.info(f"Forward-looking analysis found for group {group_id}")
                    new_forwards[group_id] = {
                        "pairs": pairs,
                        "message": forward_content
                    }
                else:
                    logger.info(f"No forward-looking analysis for group {group_id}")

            except Exception as e:
                logger.error(f"Error analyzing group {group_id}: {str(e)}", exc_info=True)
                continue

        if new_forwards:
            logger.info(f"Saving forward analysis for {len(new_forwards)} groups...")

            deeper_config = fetch_deeper_configuration()
            if "status" not in deeper_config:
                deeper_config["status"] = True

            # Replace the previous cycle's forwards wholesale with this cycle's results.
            deeper_config["forwards"] = new_forwards

            if save_deeper_configuration(deeper_config):
                logger.info("All forward analyses saved successfully")
        else:
            logger.info("No forward-looking analyses found for any groups")

        logger.info("Analysis cycle completed successfully")

    except Exception as e:
        logger.error(f"Unexpected error in analyze_forex_groups: {str(e)}", exc_info=True)

@app.route('/')
def health_check():
    """Health check endpoint to verify the service is running."""
    return jsonify({
        "status": "running",
        "message": "Forex Analysis System is active",
        "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    })

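
# Manual trigger: the route below runs one analysis cycle synchronously within the
# request, e.g. (assuming the default host/port configured at the bottom of this file):
#   curl http://localhost:7860/analyze/now
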
@app.route('/analyze/now')
def trigger_analysis():
    """Endpoint to manually trigger analysis."""
    try:
        analyze_forex_groups()
        return jsonify({
            "status": "success",
            "message": "Analysis triggered successfully",
            "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        })
    except Exception as e:
        logger.error(f"Error triggering analysis: {e}", exc_info=True)
        return jsonify({
            "status": "error",
            "message": f"Error triggering analysis: {str(e)}"
        }), 500

@app.route('/status')
def get_status():
    """Endpoint to get system status."""
    try:
        pairs_config = fetch_pairs_configuration()
        deeper_config = fetch_deeper_configuration()

        total_pairs = 0
        for group_data in pairs_config.get("GROUPS", {}).values():
            total_pairs += len(group_data.get("pairs", []))

        return jsonify({
            "service_status": "running",
            "trading_enabled": pairs_config.get("status", False),
            "groups_count": len(pairs_config.get("GROUPS", {})),
            "total_pairs_count": total_pairs,
            "deeper_analysis_enabled": deeper_config.get("status", True),
            "forwards_count": len(deeper_config.get("forwards", {})),
            "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        })
    except Exception as e:
        logger.error(f"Error getting status: {e}", exc_info=True)
        return jsonify({
            "service_status": "running",
            "error": str(e)
        })

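
# A single background scheduler re-runs the analysis every hour. Passing
# next_run_time=datetime.now() when the job is added also triggers an immediate
# first run as soon as the scheduler starts.
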
scheduler = BackgroundScheduler(daemon=True)


def start_scheduler():
    """Start the scheduler with the analysis job."""
    logger.info("Starting scheduler for forex analysis")

    scheduler.add_job(
        analyze_forex_groups,
        'interval',
        hours=1,
        id='forex_analysis',
        replace_existing=True,
        next_run_time=datetime.now()
    )

    if not scheduler.running:
        scheduler.start()
        logger.info("Scheduler started successfully")

if __name__ == "__main__":
    logger.info("Starting Forex Analysis System...")

    start_scheduler()

    app.run(host='0.0.0.0', port=7860, debug=False)