Dooratre committed on
Commit
1c5e919
·
verified ·
1 Parent(s): cc80721

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +321 -150
app.py CHANGED
@@ -1,12 +1,13 @@
1
- import json
2
  import time
3
- import logging
4
- from datetime import datetime
5
  import requests
 
 
6
  from flask import Flask, jsonify
7
  from apscheduler.schedulers.background import BackgroundScheduler
8
- from db import paires, deeper
9
- import ai
 
10
 
11
  # Configure logging
12
  logging.basicConfig(
@@ -18,150 +19,289 @@ logger = logging.getLogger(__name__)
18
  # Initialize Flask app
19
  app = Flask(__name__)
20
 
21
def fetch_pairs_configuration():
    """Fetch the current pairs configuration from GitHub.

    Returns the parsed configuration dict; on fetch failure or an empty
    payload, falls back to a disabled default: {"status": False, "GROUPS": {}}.
    """
    fallback = {"status": False, "GROUPS": {}}
    result = paires.fetch_json_from_github()

    if not result["success"]:
        logger.error(f"Error fetching pairs configuration: {result['message']}")
        return fallback

    # An empty JSON payload is treated the same as a fetch failure.
    return result["data"] or fallback
33
-
34
def fetch_deeper_configuration():
    """Fetch the current deeper analysis configuration from GitHub.

    Returns the parsed configuration dict; on fetch failure or an empty
    payload, falls back to an enabled default: {"status": True, "forwards": {}}.
    """
    fallback = {"status": True, "forwards": {}}
    result = deeper.fetch_json_from_github()

    if not result["success"]:
        logger.error(f"Error fetching deeper configuration: {result['message']}")
        return fallback

    # An empty JSON payload is treated the same as a fetch failure.
    return result["data"] or fallback
46
 
47
def save_deeper_configuration(config):
    """Save the deeper configuration to GitHub.

    Parameters:
        config: dict to persist — expected shape {"status": bool, "forwards": {...}}
            (shape inferred from fetch_deeper_configuration's defaults; confirm).

    Returns:
        True when the remote file update succeeds, False otherwise.
    """
    # Format the JSON as a single line with no indentation or whitespace
    formatted_json = json.dumps(config, separators=(',', ':'))

    # Get authentication token and commit ID
    auth_token, commit_oid = deeper.fetch_authenticity_token_and_commit_oid()

    if auth_token and commit_oid:
        # Update the GitHub file
        result = deeper.update_user_json_file(auth_token, commit_oid, formatted_json)
        if result["success"]:
            logger.info("Deeper analysis saved successfully!")
            return True
        else:
            logger.error(f"Error saving deeper analysis: {result['message']}")
            return False
    else:
        logger.error("Failed to get authentication token or commit ID. Deeper analysis not saved.")
        return False
67
 
68
def extract_forward_content(analysis_text):
    """Return the stripped text enclosed by the first <Forward>...</Forward>
    pair in *analysis_text*, or None when either tag is missing."""
    _, opened, tail = analysis_text.partition("<Forward>")
    if not opened:
        return None

    body, closed, _ = tail.partition("</Forward>")
    if not closed:
        return None

    return body.strip()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
84
 
85
def analyze_forex_groups():
    """Main function to analyze forex groups and update deeper analysis.

    Workflow: fetch the pairs configuration; bail out if trading is disabled
    or no groups exist; run the AI analysis per group, collecting any
    <Forward>-tagged output; finally replace the entire "forwards" section of
    the deeper configuration in one save.
    """
    logger.info("Starting forex group analysis...")

    try:
        # Fetch current configurations
        pairs_config = fetch_pairs_configuration()

        # Check if trading is enabled
        if not pairs_config.get("status", False):
            logger.info("Trading is currently disabled. No analysis will be performed.")
            return

        # Process each group
        groups = pairs_config.get("GROUPS", {})
        if not groups:
            logger.info("No groups configured for analysis.")
            return

        # Create a new forwards dictionary to replace the existing one
        new_forwards = {}

        for group_id, group_data in groups.items():
            logger.info(f"Analyzing group {group_id}...")
            try:
                # Extract group information
                pairs = group_data.get("pairs", [])
                description = group_data.get("description", "")
                relationships = group_data.get("relationships", "")

                if not pairs:
                    logger.warning(f"No pairs defined for group {group_id}, skipping.")
                    continue

                # Get AI analysis for the group
                analysis = ai.analyze_forex_group(pairs, description, relationships)
                logger.info(f"Analysis received for group {group_id}")

                # Extract forward-looking content if present
                forward_content = extract_forward_content(analysis)

                if forward_content:
                    logger.info(f"Forward-looking analysis found for group {group_id}")
                    # Add to the new forwards dictionary with group ID as key
                    new_forwards[group_id] = {
                        "pairs": pairs,
                        "message": forward_content
                    }
                else:
                    logger.info(f"No forward-looking analysis for group {group_id}")

            except Exception as e:
                logger.error(f"Error analyzing group {group_id}: {str(e)}", exc_info=True)
                # Continue with next group instead of stopping the entire process
                continue

        # After collecting all analyses, update the deeper configuration in a single operation
        if new_forwards:
            logger.info(f"Saving forward analysis for {len(new_forwards)} groups...")

            # Fetch the latest deeper configuration
            deeper_config = fetch_deeper_configuration()

            # Ensure deeper config has the correct structure
            if "status" not in deeper_config:
                deeper_config["status"] = True

            # Replace the entire forwards section with our new data
            # NOTE(review): any forwards written by other processes since the
            # fetch above are overwritten here — confirm this is intended.
            deeper_config["forwards"] = new_forwards

            # Save the configuration
            save_deeper_configuration(deeper_config)
            logger.info("All forward analyses saved successfully")
        else:
            logger.info("No forward-looking analyses found for any groups")

        logger.info("Analysis cycle completed successfully")

    except Exception as e:
        logger.error(f"Unexpected error in analyze_forex_groups: {str(e)}", exc_info=True)
165
 
166
  # Flask routes
167
  @app.route('/')
@@ -169,8 +309,7 @@ def health_check():
169
  """Health check endpoint to verify the service is running"""
170
  return jsonify({
171
  "status": "running",
172
- "message": "Forex Analysis System is active",
173
- "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
174
  })
175
 
176
  @app.route('/analyze/now')
@@ -180,8 +319,7 @@ def trigger_analysis():
180
  analyze_forex_groups()
181
  return jsonify({
182
  "status": "success",
183
- "message": "Analysis triggered successfully",
184
- "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
185
  })
186
  except Exception as e:
187
  logger.error(f"Error triggering analysis: {e}", exc_info=True)
@@ -194,23 +332,18 @@ def trigger_analysis():
194
  def get_status():
195
  """Endpoint to get system status"""
196
  try:
197
- pairs_config = fetch_pairs_configuration()
198
- deeper_config = fetch_deeper_configuration()
199
-
200
- # Count all pairs across all groups
201
- total_pairs = 0
202
- for group_data in pairs_config.get("GROUPS", {}).values():
203
- total_pairs += len(group_data.get("pairs", []))
204
-
205
- return jsonify({
206
- "service_status": "running",
207
- "trading_enabled": pairs_config.get("status", False),
208
- "groups_count": len(pairs_config.get("GROUPS", {})),
209
- "total_pairs_count": total_pairs,
210
- "deeper_analysis_enabled": deeper_config.get("status", True),
211
- "forwards_count": len(deeper_config.get("forwards", {})),
212
- "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
213
- })
214
  except Exception as e:
215
  logger.error(f"Error getting status: {e}", exc_info=True)
216
  return jsonify({
@@ -218,23 +351,61 @@ def get_status():
218
  "error": str(e)
219
  })
220
 
221
- # Initialize scheduler
222
- scheduler = BackgroundScheduler(daemon=True)
 
 
 
 
 
 
 
 
 
 
 
 
223
 
224
- def start_scheduler():
225
- """Start the scheduler with the analysis job"""
226
- logger.info("Starting scheduler for forex analysis")
 
 
 
 
 
 
 
227
 
228
- # Schedule the analysis function to run every hour
229
  scheduler.add_job(
230
- analyze_forex_groups, # Changed from analyze_forex_pairs to analyze_forex_groups
231
- 'interval',
232
- hours=1,
233
- id='forex_analysis',
234
- replace_existing=True,
235
- next_run_time=datetime.now() # Run immediately on startup
236
  )
237
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
238
  # Start the scheduler if it's not already running
239
  if not scheduler.running:
240
  scheduler.start()
 
 
1
  import time
2
+ import json
 
3
  import requests
4
+ import re
5
+ import logging
6
  from flask import Flask, jsonify
7
  from apscheduler.schedulers.background import BackgroundScheduler
8
+ from datetime import datetime, timedelta
9
+ from db import deeper, signals
10
+ from ai import analyze_forex_pairs
11
 
12
  # Configure logging
13
  logging.basicConfig(
 
19
  # Initialize Flask app
20
  app = Flask(__name__)
21
 
22
import os

# Telegram configuration.
# SECURITY: these credentials were previously hard-coded; a bot token committed
# to source control must be considered leaked and should be rotated. Prefer the
# environment variables below — the literal fallbacks remain only so existing
# deployments keep working until they are migrated.
TELEGRAM_TOKEN = os.environ.get("TELEGRAM_TOKEN", "7750258010:AAEfEn1Hc1h0n6uRc1KcPdZf7ozBEkehnEY")
TELEGRAM_CHAT_ID = os.environ.get("TELEGRAM_CHAT_ID", "6859142642")
TELEGRAM_CHAT_ID2 = os.environ.get("TELEGRAM_CHAT_ID2", "5666511049")
26
+
27
def send_telegram_message(message):
    """Send *message* to both configured Telegram chats via the Bot API.

    Parameters:
        message: HTML-formatted text (parse_mode is "HTML").

    Returns:
        The parsed JSON response for the FIRST chat on success, else None.
        (Interface kept backward-compatible: callers only inspect the first
        send's result.)
    """
    url = f"https://api.telegram.org/bot{TELEGRAM_TOKEN}/sendMessage"
    data = {
        "chat_id": TELEGRAM_CHAT_ID,
        "text": message,
        "parse_mode": "HTML"
    }
    data2 = {
        "chat_id": TELEGRAM_CHAT_ID2,
        "text": message,
        "parse_mode": "HTML"
    }
    try:
        logger.info(f"Sending Telegram message to chat ID: {TELEGRAM_CHAT_ID}")
        # Fix: timeouts added so a stalled Telegram API cannot hang the
        # scheduler thread indefinitely.
        response = requests.post(url, data=data, timeout=10)
        response2 = requests.post(url, data=data2, timeout=10)

        # Fix: the second chat's outcome was previously fetched but never
        # inspected — log its failure so silent delivery loss is visible.
        try:
            response2_json = response2.json()
            if not response2_json.get("ok"):
                logger.error(f"Telegram API error for second chat: {response2_json.get('description', 'Unknown error')}")
        except ValueError:
            logger.error("Telegram API returned a non-JSON response for the second chat")

        response_json = response.json()

        if response_json.get("ok"):
            logger.info("Telegram message sent successfully")
            return response_json
        else:
            logger.error(f"Telegram API error: {response_json.get('description', 'Unknown error')}")
            return None
    except Exception as e:
        logger.error(f"Error sending Telegram message: {e}", exc_info=True)
        return None
55
 
56
def get_active_signals():
    """Build a human-readable (Arabic) summary of all active signals.

    Returns one line per stored signal; on fetch failure or when the
    database is empty, returns the "no active deals" message instead.
    """
    no_deals = "لا توجد صفقات نشطة حالياً."
    signals_response = signals.fetch_json_from_github()

    if not signals_response["success"]:
        logger.error(f"Error fetching signals data: {signals_response['message']}")
        return no_deals

    signals_data = signals_response["data"]

    # Defensive check - the remote payload must be a non-empty list.
    if not isinstance(signals_data, list) or len(signals_data) == 0:
        logger.warning("No active signals found in database")
        return no_deals

    deals = [
        (
            f"الزوج: {signal.get('pair', '')}, "
            f"النوع: {signal.get('type', '')}, "
            f"سعر الدخول: {signal.get('entry', '')}, "
            f"وقف الخسارة: {signal.get('stop_loss', '')}, "
            f"الهدف: {signal.get('take_profit', '')}"
        )
        for signal in signals_data
    ]

    return "\n".join(deals)
84
+
85
def extract_signal_from_ai_response(response):
    """Parse the first <signal>...</signal> block out of an AI response.

    Returns a dict with the signal's fields (missing tags become "" or the
    documented Arabic defaults), or None when no <signal> block is present.
    """
    block = re.search(r'<signal>(.*?)</signal>', response, re.DOTALL)
    if block is None:
        return None

    body = block.group(1)

    def grab(tag, fallback=""):
        # Pull the stripped text of <tag>...</tag>, or the fallback.
        found = re.search(rf'<{tag}>(.*?)</{tag}>', body, re.DOTALL)
        return found.group(1).strip() if found else fallback

    return {
        "pair": grab("pair"),
        "timeframe": "15min",  # Default timeframe
        "type": grab("type"),
        "entry": grab("entry"),
        "stop_loss": grab("stop_loss"),
        "take_profit": grab("take_profit"),
        "duration": grab("duration", "1-3 ساعات"),
        "reason": grab("reason", "تم التحليل بواسطة النظام الآلي"),
        "status": "starting",
    }
118
+
119
def check_if_pairs_exist_in_signals(pairs_list):
    """Return True when a stored signal already covers exactly *pairs_list*.

    Comparison is order-insensitive: both pair lists are sorted and
    serialized to JSON before being compared.
    """
    signals_response = signals.fetch_json_from_github()

    if not signals_response["success"]:
        logger.error(f"Error fetching signals data: {signals_response['message']}")
        return False

    signals_data = signals_response["data"]

    # Defensive check - ensure signals_data is a list
    if not isinstance(signals_data, list):
        logger.warning(f"signals_data is not a list (type: {type(signals_data)}), converting to empty list")
        signals_data = []

    # Canonical, order-insensitive representation of the wanted pair list.
    target = json.dumps(sorted(pairs_list), ensure_ascii=False)

    return any(
        "pairs" in signal
        and json.dumps(sorted(signal.get("pairs", [])), ensure_ascii=False) == target
        for signal in signals_data
    )
143
+
144
def update_signals_file(signal_data, pairs_list):
    """Append *signal_data* (tagged with *pairs_list*) to the signals JSON on GitHub.

    Returns True when the remote update succeeds, False on any fetch,
    authentication, or update failure.
    """
    signals_response = signals.fetch_json_from_github()
    if not signals_response["success"]:
        logger.error(f"Error fetching signals data: {signals_response['message']}")
        return False

    signals_data = signals_response["data"]

    # Defensive check - ensure signals_data is a list
    if not isinstance(signals_data, list):
        logger.warning(f"signals_data is not a list (type: {type(signals_data)}), converting to empty list")
        signals_data = []

    # Tag the signal with the group's pair list, then append it.
    signal_data["pairs"] = pairs_list
    signals_data.append(signal_data)

    auth_token, commit_oid = signals.fetch_authenticity_token_and_commit_oid()
    if not auth_token or not commit_oid:
        logger.error("Failed to get authenticity token or commit OID for signals file")
        return False

    # Single-line JSON keeps the stored file compact.
    payload = json.dumps(signals_data, ensure_ascii=False, separators=(',', ':'))
    update_response = signals.update_user_json_file(auth_token, commit_oid, payload)

    return update_response["success"]
180
+
181
def remove_group_from_deeper(group_key):
    """Remove *group_key* from the "forwards" section of deeper.json on GitHub.

    Parameters:
        group_key: key of the analyzed group to drop.

    Returns:
        True when the group is absent afterwards (including when it was never
        present), False on any fetch/auth/update failure.
    """
    # Fetch current deeper data
    deeper_response = deeper.fetch_json_from_github()

    if not deeper_response["success"]:
        logger.error(f"Error fetching deeper data: {deeper_response['message']}")
        return False

    deeper_data = deeper_response["data"]

    # Fix: previously the file was re-committed to GitHub even when the group
    # was not present, producing pointless remote writes. Skip the write —
    # the desired end state (group absent) already holds.
    if group_key not in deeper_data.get("forwards", {}):
        return True

    del deeper_data["forwards"][group_key]

    # Get authenticity token and commit OID for deeper file
    auth_token, commit_oid = deeper.fetch_authenticity_token_and_commit_oid()
    if not auth_token or not commit_oid:
        logger.error("Failed to get authenticity token or commit OID for deeper file")
        return False

    # Update the deeper file - single-line JSON keeps the stored file compact.
    update_response = deeper.update_user_json_file(
        auth_token,
        commit_oid,
        json.dumps(deeper_data, ensure_ascii=False, separators=(',', ':'))
    )

    return update_response["success"]
210
+
211
def format_telegram_message(signal_data):
    """Render a signal dict as the Arabic HTML message sent to Telegram.

    Expects the keys produced by extract_signal_from_ai_response:
    pair, type, entry, stop_loss, take_profit.
    """
    lines = [
        "🔔 <b>إشارة فوركس جديدة</b> 🔔",
        "",
        f"<b>🔹 الزوج:</b> {signal_data['pair']}",
        f"<b>📊 النوع:</b> {signal_data['type']}",
        f"<b>🎯 الدخول:</b> {signal_data['entry']}",
        f"<b>🛡️ وقف الخسارة:</b> {signal_data['stop_loss']}",
        f"<b>✨ الهدف:</b> {signal_data['take_profit']}",
        "",  # preserve the trailing newline of the original concatenation
    ]
    return "\n".join(lines)
221
 
222
def analyze_forex_groups():
    """Function to analyze forex groups from the deeper.json file.

    Per cycle: fetch deeper.json and abort if the fetch fails or its
    "status" flag is falsy; build a summary of currently-active signals;
    then for each group under "forwards": skip (and prune) groups whose
    pairs already have a stored signal, otherwise ask the AI for a signal,
    persist it, notify Telegram, and prune the group from deeper.json.

    NOTE(review): every helper call here re-fetches/re-commits remote JSON,
    so one cycle can issue many GitHub round-trips — presumably fine at this
    cadence; verify rate limits if group counts grow.
    """
    logger.info("Starting forex group analysis cycle")

    try:
        # Fetch data from deeper.json
        deeper_response = deeper.fetch_json_from_github()

        if not deeper_response["success"]:
            logger.error(f"Error fetching deeper data: {deeper_response['message']}")
            return

        deeper_data = deeper_response["data"]

        # Check if system is enabled
        if not deeper_data.get("status", False):
            logger.info("System is currently turned OFF. Please turn it ON to continue.")
            return

        # Get active signals to pass to AI
        active_deals = get_active_signals()
        logger.info(f"Active deals: {active_deals}")

        # Process each forex group in the forwards section
        # (iterates the local copy — remote pruning below does not mutate it)
        for group_key, group_data in deeper_data.get("forwards", {}).items():
            pairs = group_data.get("pairs", [])
            message = group_data.get("message", "")

            if not pairs:
                logger.warning(f"Group {group_key} has no pairs. Skipping.")
                continue

            logger.info(f"Analyzing group {group_key} with pairs: {', '.join(pairs)}")

            # Check if this group already exists in signals database
            if check_if_pairs_exist_in_signals(pairs):
                logger.info(f"Signal for group {group_key} already exists in database. Skipping analysis.")

                # Remove the group from deeper.json since we're not processing it
                if remove_group_from_deeper(group_key):
                    logger.info(f"Group {group_key} removed from deeper.json successfully")
                else:
                    logger.error(f"Failed to remove group {group_key} from deeper.json")

                continue

            # Call AI to analyze the forex pairs with active deals
            ai_response = analyze_forex_pairs(pairs, message, active_deals)

            # Check if the AI response contains a signal
            signal_data = extract_signal_from_ai_response(ai_response)

            if signal_data:
                logger.info(f"Signal detected for group {group_key}")

                # Update signals file with the new signal, including pairs list
                if update_signals_file(signal_data, pairs):
                    logger.info(f"Signal for group {group_key} saved successfully")

                    # Format and send Telegram message AFTER successfully saving to database
                    telegram_message = format_telegram_message(signal_data)
                    logger.info("Attempting to send Telegram message...")
                    telegram_response = send_telegram_message(telegram_message)

                    if telegram_response and telegram_response.get("ok"):
                        logger.info(f"Telegram message for group {group_key} sent successfully")
                    else:
                        logger.error(f"Failed to send Telegram message for group {group_key}. Response: {telegram_response}")

                    # Remove the group from deeper.json
                    if remove_group_from_deeper(group_key):
                        logger.info(f"Group {group_key} removed from deeper.json successfully")
                    else:
                        logger.error(f"Failed to remove group {group_key} from deeper.json")
                else:
                    logger.error(f"Failed to save signal for group {group_key}")
            else:
                logger.info(f"No signal detected in AI response for group {group_key}")

        logger.info("Analysis cycle completed successfully")

    except Exception as e:
        logger.error(f"Error in analyze_forex_groups: {e}", exc_info=True)
305
 
306
  # Flask routes
307
  @app.route('/')
 
309
  """Health check endpoint to verify the service is running"""
310
  return jsonify({
311
  "status": "running",
312
+ "message": "Forex Analysis System is active"
 
313
  })
314
 
315
  @app.route('/analyze/now')
 
319
  analyze_forex_groups()
320
  return jsonify({
321
  "status": "success",
322
+ "message": "Analysis triggered successfully"
 
323
  })
324
  except Exception as e:
325
  logger.error(f"Error triggering analysis: {e}", exc_info=True)
 
332
  def get_status():
333
  """Endpoint to get system status"""
334
  try:
335
+ deeper_response = deeper.fetch_json_from_github()
336
+ if deeper_response["success"]:
337
+ system_status = deeper_response["data"].get("status", False)
338
+ return jsonify({
339
+ "system_enabled": system_status,
340
+ "service_status": "running"
341
+ })
342
+ else:
343
+ return jsonify({
344
+ "service_status": "running",
345
+ "error": deeper_response["message"]
346
+ })
 
 
 
 
 
347
  except Exception as e:
348
  logger.error(f"Error getting status: {e}", exc_info=True)
349
  return jsonify({
 
351
  "error": str(e)
352
  })
353
 
354
def schedule_candle_analysis():
    """Schedule the next analysis run shortly after a 15-minute candle close.

    Candles close at :00, :15, :30 and :45; the job is booked ~2 minutes
    later (:02, :17, :32, :47) so the freshly closed candle's data is
    available. Fix: the hour wrap-around previously scheduled at :00 sharp,
    losing the 2-minute buffer every other slot has — it now uses :02 of the
    next hour for consistency (the old docstring also claimed the exact
    close times, which the code never used).
    """
    now = datetime.now()

    # Run minutes within the hour, each 2 minutes after a candle close.
    run_minutes = (2, 17, 32, 47)
    next_minute = next((m for m in run_minutes if now.minute < m), None)

    if next_minute is None:
        # Past :47 -> first slot (:02) of the next hour; timedelta handles
        # day/month rollover for us.
        target_time = now.replace(minute=2, second=0, microsecond=0) + timedelta(hours=1)
    else:
        target_time = now.replace(minute=next_minute, second=0, microsecond=0)

    # Calculate seconds until next run
    time_diff = (target_time - now).total_seconds()

    logger.info(f"Scheduling analysis to run at {target_time.strftime('%H:%M:%S')} (in {time_diff:.1f} seconds)")

    # One-shot job; run_analysis_and_reschedule books the following run.
    scheduler.add_job(
        run_analysis_and_reschedule,
        'date',
        run_date=target_time,
        id='candle_analysis'
    )
387
 
388
def run_analysis_and_reschedule():
    """Execute one analysis pass, then book the next candle-close run."""
    logger.info("Running scheduled analysis at candle close")
    try:
        analyze_forex_groups()
    except Exception as e:
        logger.error(f"Error in scheduled analysis: {e}", exc_info=True)
    finally:
        # Reschedule unconditionally so one failed cycle never stops the loop.
        schedule_candle_analysis()
398
+
399
# Initialize scheduler
scheduler = BackgroundScheduler()

def start_scheduler():
    """Start the scheduler with the analysis job at candle close times"""
    logger.info("Starting scheduler for forex analysis at candle close times (00, 15, 30, 45)")

    # Book the first candle-close run; subsequent runs reschedule themselves.
    schedule_candle_analysis()

    # Start the scheduler if it's not already running
    if scheduler.running:
        return
    scheduler.start()
  scheduler.start()