Dooratre commited on
Commit
aa1e73c
·
verified ·
1 Parent(s): c845e0a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +283 -323
app.py CHANGED
@@ -1,13 +1,14 @@
1
- import time
2
- import json
3
  import requests
 
 
4
  import re
5
  import logging
 
6
  from flask import Flask, jsonify
7
  from apscheduler.schedulers.background import BackgroundScheduler
8
- from datetime import datetime, timedelta
9
- from db import deeper, signals
10
- from ai import analyze_forex_pairs
11
 
12
  # Configure logging
13
  logging.basicConfig(
@@ -24,284 +25,271 @@ TELEGRAM_TOKEN = "7750258010:AAEfEn1Hc1h0n6uRc1KcPdZf7ozBEkehnEY"
24
  TELEGRAM_CHAT_ID = "6859142642"
25
  TELEGRAM_CHAT_ID2 = "5666511049"
26
 
27
- def send_telegram_message(message):
28
- """Send message to Telegram user"""
29
  url = f"https://api.telegram.org/bot{TELEGRAM_TOKEN}/sendMessage"
30
  data = {
31
  "chat_id": TELEGRAM_CHAT_ID,
32
- "text": message,
33
  "parse_mode": "HTML"
34
  }
35
  data2 = {
36
  "chat_id": TELEGRAM_CHAT_ID2,
37
- "text": message,
38
  "parse_mode": "HTML"
39
  }
40
  try:
41
- logger.info(f"Sending Telegram message to chat ID: {TELEGRAM_CHAT_ID}")
42
  response = requests.post(url, data=data)
43
  response2 = requests.post(url, data=data2)
44
- response_json = response.json()
45
-
46
- if response_json.get("ok"):
47
- logger.info("Telegram message sent successfully")
48
- return response_json
49
- else:
50
- logger.error(f"Telegram API error: {response_json.get('description', 'Unknown error')}")
51
- return None
52
  except Exception as e:
53
- logger.error(f"Error sending Telegram message: {e}", exc_info=True)
54
- return None
55
-
56
- def get_active_signals():
57
- """Get all active signals from the signals database"""
58
- signals_response = signals.fetch_json_from_github()
59
-
60
- if not signals_response["success"]:
61
- logger.error(f"Error fetching signals data: {signals_response['message']}")
62
- return "لا توجد صفقات نشطة حالياً."
63
 
64
- signals_data = signals_response["data"]
65
-
66
- # Defensive check - ensure signals_data is a list
67
- if not isinstance(signals_data, list) or len(signals_data) == 0:
68
- logger.warning("No active signals found in database")
69
- return "لا توجد صفقات نشطة حالياً."
70
-
71
- deals_string = []
72
- for signal in signals_data:
73
- deal = f"الزوج: {signal.get('pair', '')}, "
74
- deal += f"النوع: {signal.get('type', '')}, "
75
- deal += f"سعر الدخول: {signal.get('entry', '')}, "
76
- deal += f"وقف الخسارة: {signal.get('stop_loss', '')}, "
77
- deal += f"الهدف: {signal.get('take_profit', '')}"
78
- deals_string.append(deal)
79
-
80
- if not deals_string:
81
- return "لا توجد صفقات نشطة حالياً."
82
-
83
- return "\n".join(deals_string)
84
-
85
- def extract_signal_from_ai_response(response):
86
- """Extract signal data from AI response if present"""
87
- signal_pattern = r'<signal>(.*?)</signal>'
88
- match = re.search(signal_pattern, response, re.DOTALL)
89
-
90
- if not match:
91
- return None
92
-
93
- signal_text = match.group(1)
94
-
95
- # Extract individual fields from signal - full format
96
- pair_match = re.search(r'<pair>(.*?)</pair>', signal_text, re.DOTALL)
97
- type_match = re.search(r'<type>(.*?)</type>', signal_text, re.DOTALL)
98
- entry_match = re.search(r'<entry>(.*?)</entry>', signal_text, re.DOTALL)
99
- stop_loss_match = re.search(r'<stop_loss>(.*?)</stop_loss>', signal_text, re.DOTALL)
100
- take_profit_match = re.search(r'<take_profit>(.*?)</take_profit>', signal_text, re.DOTALL)
101
- duration_match = re.search(r'<duration>(.*?)</duration>', signal_text, re.DOTALL)
102
- reason_match = re.search(r'<reason>(.*?)</reason>', signal_text, re.DOTALL)
103
-
104
- # Create signal dictionary
105
- signal_data = {
106
- "pair": pair_match.group(1).strip() if pair_match else "",
107
- "timeframe": "15min", # Default timeframe
108
- "type": type_match.group(1).strip() if type_match else "",
109
- "entry": entry_match.group(1).strip() if entry_match else "",
110
- "stop_loss": stop_loss_match.group(1).strip() if stop_loss_match else "",
111
- "take_profit": take_profit_match.group(1).strip() if take_profit_match else "",
112
- "duration": duration_match.group(1).strip() if duration_match else "1-3 ساعات",
113
- "reason": reason_match.group(1).strip() if reason_match else "تم التحليل بواسطة النظام الآلي",
114
- "status": "starting"
115
  }
116
 
117
- return signal_data
118
-
119
- def check_if_pairs_exist_in_signals(pairs_list):
120
- """Check if a list of pairs already exists in the signals database"""
121
- signals_response = signals.fetch_json_from_github()
122
-
123
- if not signals_response["success"]:
124
- logger.error(f"Error fetching signals data: {signals_response['message']}")
125
- return False
126
-
127
- signals_data = signals_response["data"]
128
-
129
- # Defensive check - ensure signals_data is a list
130
- if not isinstance(signals_data, list):
131
- logger.warning(f"signals_data is not a list (type: {type(signals_data)}), converting to empty list")
132
- signals_data = []
133
-
134
- # Convert pairs_list to a sorted JSON string for consistent comparison
135
- pairs_json = json.dumps(sorted(pairs_list), ensure_ascii=False)
136
-
137
- # Check if pairs list exists
138
- for signal in signals_data:
139
- if "pairs" in signal and json.dumps(sorted(signal.get("pairs", [])), ensure_ascii=False) == pairs_json:
140
- return True
141
-
142
- return False
143
-
144
- def update_signals_file(signal_data, pairs_list):
145
- """Update signals JSON file with new signal data including pairs list"""
146
- # Fetch current signals data
147
- signals_response = signals.fetch_json_from_github()
148
-
149
- if not signals_response["success"]:
150
- logger.error(f"Error fetching signals data: {signals_response['message']}")
151
- return False
152
-
153
- signals_data = signals_response["data"]
154
-
155
- # Defensive check - ensure signals_data is a list
156
- if not isinstance(signals_data, list):
157
- logger.warning(f"signals_data is not a list (type: {type(signals_data)}), converting to empty list")
158
- signals_data = []
159
-
160
- # Add pairs list to the signal data
161
- signal_data["pairs"] = pairs_list
162
-
163
- # Add new signal
164
- signals_data.append(signal_data)
165
-
166
- # Get authenticity token and commit OID for signals file
167
- auth_token, commit_oid = signals.fetch_authenticity_token_and_commit_oid()
168
- if not auth_token or not commit_oid:
169
- logger.error("Failed to get authenticity token or commit OID for signals file")
170
- return False
171
-
172
- # Update the signals file - using json.dumps with separators to ensure a single line
173
- update_response = signals.update_user_json_file(
174
- auth_token,
175
- commit_oid,
176
- json.dumps(signals_data, ensure_ascii=False, separators=(',', ':'))
177
- )
178
-
179
- return update_response["success"]
180
-
181
- def remove_group_from_deeper(group_key):
182
- """Remove analyzed group from deeper.json file"""
183
- # Fetch current deeper data
184
- deeper_response = deeper.fetch_json_from_github()
185
-
186
- if not deeper_response["success"]:
187
- logger.error(f"Error fetching deeper data: {deeper_response['message']}")
188
- return False
189
-
190
- deeper_data = deeper_response["data"]
191
 
192
- # Remove the group from forwards if it exists
193
- if group_key in deeper_data.get("forwards", {}):
194
- del deeper_data["forwards"][group_key]
 
 
 
 
 
 
 
195
 
196
- # Get authenticity token and commit OID for deeper file
197
- auth_token, commit_oid = deeper.fetch_authenticity_token_and_commit_oid()
198
- if not auth_token or not commit_oid:
199
- logger.error("Failed to get authenticity token or commit OID for deeper file")
200
- return False
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
201
 
202
- # Update the deeper file - using json.dumps with separators to ensure a single line
203
- update_response = deeper.update_user_json_file(
204
- auth_token,
205
- commit_oid,
206
- json.dumps(deeper_data, ensure_ascii=False, separators=(',', ':'))
207
- )
208
 
209
- return update_response["success"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
210
 
211
- def format_telegram_message(signal_data):
212
- """تنسيق إشارة الفوركس للإرسال عبر تيليجرام - بصيغة احترافية"""
213
- message = "🔔 <b>إشارة فوركس جديدة</b> 🔔\n\n"
214
- message += f"<b>🔹 الزوج:</b> {signal_data['pair']}\n"
215
- message += f"<b>📊 النوع:</b> {signal_data['type']}\n"
216
- message += f"<b>🎯 الدخول:</b> {signal_data['entry']}\n"
217
- message += f"<b>🛡️ وقف الخسارة:</b> {signal_data['stop_loss']}\n"
218
- message += f"<b>✨ الهدف:</b> {signal_data['take_profit']}\n"
 
219
 
220
- return message
221
 
222
- def analyze_forex_groups():
223
- """Function to analyze forex groups from the deeper.json file"""
224
- logger.info("Starting forex group analysis cycle")
 
225
 
226
  try:
227
- # Fetch data from deeper.json
228
- deeper_response = deeper.fetch_json_from_github()
229
-
230
- if not deeper_response["success"]:
231
- logger.error(f"Error fetching deeper data: {deeper_response['message']}")
232
- return
233
 
234
- deeper_data = deeper_response["data"]
235
-
236
- # Check if system is enabled
237
- if not deeper_data.get("status", False):
238
- logger.info("System is currently turned OFF. Please turn it ON to continue.")
239
  return
240
 
241
- # Get active signals to pass to AI
242
- active_deals = get_active_signals()
243
- logger.info(f"Active deals: {active_deals}")
244
-
245
- # Process each forex group in the forwards section
246
- for group_key, group_data in deeper_data.get("forwards", {}).items():
247
- pairs = group_data.get("pairs", [])
248
- message = group_data.get("message", "")
249
-
250
- if not pairs:
251
- logger.warning(f"Group {group_key} has no pairs. Skipping.")
252
- continue
253
-
254
- logger.info(f"Analyzing group {group_key} with pairs: {', '.join(pairs)}")
255
-
256
- # Check if this group already exists in signals database
257
- if check_if_pairs_exist_in_signals(pairs):
258
- logger.info(f"Signal for group {group_key} already exists in database. Skipping analysis.")
259
-
260
- # Remove the group from deeper.json since we're not processing it
261
- if remove_group_from_deeper(group_key):
262
- logger.info(f"Group {group_key} removed from deeper.json successfully")
263
- else:
264
- logger.error(f"Failed to remove group {group_key} from deeper.json")
265
-
266
- continue
267
-
268
- # Call AI to analyze the forex pairs with active deals
269
- ai_response = analyze_forex_pairs(pairs, message, active_deals)
270
-
271
- # Check if the AI response contains a signal
272
- signal_data = extract_signal_from_ai_response(ai_response)
273
-
274
- if signal_data:
275
- logger.info(f"Signal detected for group {group_key}")
276
-
277
- # Update signals file with the new signal, including pairs list
278
- if update_signals_file(signal_data, pairs):
279
- logger.info(f"Signal for group {group_key} saved successfully")
280
-
281
- # Format and send Telegram message AFTER successfully saving to database
282
- telegram_message = format_telegram_message(signal_data)
283
- logger.info("Attempting to send Telegram message...")
284
- telegram_response = send_telegram_message(telegram_message)
285
-
286
- if telegram_response and telegram_response.get("ok"):
287
- logger.info(f"Telegram message for group {group_key} sent successfully")
 
 
288
  else:
289
- logger.error(f"Failed to send Telegram message for group {group_key}. Response: {telegram_response}")
290
-
291
- # Remove the group from deeper.json
292
- if remove_group_from_deeper(group_key):
293
- logger.info(f"Group {group_key} removed from deeper.json successfully")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
294
  else:
295
- logger.error(f"Failed to remove group {group_key} from deeper.json")
296
  else:
297
- logger.error(f"Failed to save signal for group {group_key}")
298
- else:
299
- logger.info(f"No signal detected in AI response for group {group_key}")
300
-
301
- logger.info("Analysis cycle completed successfully")
302
 
303
  except Exception as e:
304
- logger.error(f"Error in analyze_forex_groups: {e}", exc_info=True)
305
 
306
  # Flask routes
307
  @app.route('/')
@@ -309,102 +297,74 @@ def health_check():
309
  """Health check endpoint to verify the service is running"""
310
  return jsonify({
311
  "status": "running",
312
- "message": "Forex Analysis System is active"
 
313
  })
314
 
315
- @app.route('/analyze/now')
316
- def trigger_analysis():
317
- """Endpoint to manually trigger analysis"""
318
  try:
319
- analyze_forex_groups()
320
  return jsonify({
321
  "status": "success",
322
- "message": "Analysis triggered successfully"
 
323
  })
324
  except Exception as e:
325
- logger.error(f"Error triggering analysis: {e}", exc_info=True)
326
  return jsonify({
327
  "status": "error",
328
- "message": f"Error triggering analysis: {str(e)}"
329
  }), 500
330
 
331
- @app.route('/status')
332
- def get_status():
333
- """Endpoint to get system status"""
334
- try:
335
- deeper_response = deeper.fetch_json_from_github()
336
- if deeper_response["success"]:
337
- system_status = deeper_response["data"].get("status", False)
338
- return jsonify({
339
- "system_enabled": system_status,
340
- "service_status": "running"
341
- })
342
- else:
343
- return jsonify({
344
- "service_status": "running",
345
- "error": deeper_response["message"]
346
- })
347
- except Exception as e:
348
- logger.error(f"Error getting status: {e}", exc_info=True)
349
- return jsonify({
350
- "service_status": "running",
351
- "error": str(e)
352
- })
353
 
354
- def schedule_candle_analysis():
355
- """Schedule analysis at candle close times (00, 15, 30, 45)"""
356
  now = datetime.now()
 
357
  current_minute = now.minute
 
 
 
358
 
359
- # Calculate the next 15-minute mark
360
- if current_minute < 17:
361
- next_minute = 17
362
- elif current_minute < 32:
363
- next_minute = 32
364
- elif current_minute < 47:
365
- next_minute = 47
366
- else:
367
- next_minute = 0 # Next hour
368
-
369
- # Calculate the target time
370
- if next_minute == 0:
371
- target_time = datetime(now.year, now.month, now.day, now.hour, 0, 0) + timedelta(hours=1)
372
- else:
373
- target_time = datetime(now.year, now.month, now.day, now.hour, next_minute, 0)
374
 
375
- # Calculate seconds until next run
376
- time_diff = (target_time - now).total_seconds()
377
 
378
- logger.info(f"Scheduling analysis to run at {target_time.strftime('%H:%M:%S')} (in {time_diff:.1f} seconds)")
 
379
 
380
- # Schedule the job
381
  scheduler.add_job(
382
- run_analysis_and_reschedule,
383
  'date',
384
- run_date=target_time,
385
- id='candle_analysis'
 
386
  )
387
 
388
- def run_analysis_and_reschedule():
389
- """Run the analysis and schedule the next run"""
390
- logger.info("Running scheduled analysis at candle close")
391
- try:
392
- analyze_forex_groups()
393
- except Exception as e:
394
- logger.error(f"Error in scheduled analysis: {e}", exc_info=True)
395
- finally:
396
- # Always reschedule the next run
397
- schedule_candle_analysis()
398
-
399
- # Initialize scheduler
400
- scheduler = BackgroundScheduler()
401
-
402
  def start_scheduler():
403
- """Start the scheduler with the analysis job at candle close times"""
404
- logger.info("Starting scheduler for forex analysis at candle close times (00, 15, 30, 45)")
405
 
406
- # Schedule the first analysis
407
- schedule_candle_analysis()
408
 
409
  # Start the scheduler if it's not already running
410
  if not scheduler.running:
@@ -412,7 +372,7 @@ def start_scheduler():
412
  logger.info("Scheduler started successfully")
413
 
414
  if __name__ == "__main__":
415
- logger.info("Starting Forex Analysis System...")
416
 
417
  # Start the scheduler
418
  start_scheduler()
 
 
 
1
  import requests
2
+ import json
3
+ import time
4
  import re
5
  import logging
6
+ from datetime import datetime, timedelta
7
  from flask import Flask, jsonify
8
  from apscheduler.schedulers.background import BackgroundScheduler
9
+ from apscheduler.triggers.cron import CronTrigger
10
+ from db.signals import fetch_json_from_github, update_user_json_file, fetch_authenticity_token_and_commit_oid
11
+ from ai import analyze_forex_pairs # Import the updated analyze_forex_pairs function
12
 
13
  # Configure logging
14
  logging.basicConfig(
 
25
  TELEGRAM_CHAT_ID = "6859142642"
26
  TELEGRAM_CHAT_ID2 = "5666511049"
27
 
28
def send_telegram_message(text):
    """Send *text* to both configured Telegram chats.

    Args:
        text: Message body; sent with parse_mode=HTML, so it may contain
            Telegram-supported HTML tags (the callers build such markup).

    Returns:
        The parsed JSON response of the Telegram API for the primary chat,
        or ``{"ok": False, "error": ...}`` when the request fails.
    """
    url = f"https://api.telegram.org/bot{TELEGRAM_TOKEN}/sendMessage"
    data = {
        "chat_id": TELEGRAM_CHAT_ID,
        "text": text,
        "parse_mode": "HTML"
    }
    data2 = {
        "chat_id": TELEGRAM_CHAT_ID2,
        "text": text,
        "parse_mode": "HTML"
    }
    try:
        # timeout added: an unbounded post could hang the scheduler thread
        # indefinitely if the Telegram API stalls.
        response = requests.post(url, data=data, timeout=30)
        response2 = requests.post(url, data=data2, timeout=30)
        # Previously response2 was fetched and discarded, making delivery
        # failures to the secondary chat invisible; log them explicitly.
        if not response2.ok:
            logger.error(f"Telegram send to secondary chat failed: {response2.text}")
        return response.json()
    except Exception as e:
        logger.error(f"Error sending Telegram message: {e}")
        return {"ok": False, "error": str(e)}
 
 
 
 
 
 
 
 
48
 
49
def send_to_api(pair_name, message):
    """Forward a per-pair message to the external trading API.

    Args:
        pair_name: Forex pair identifier (e.g. "EURUSD").
        message: Free-text message produced by the AI analysis.

    Returns:
        dict: ``{"success": True, "response": <parsed JSON>}`` on success,
        ``{"success": False, "error": <str>}`` on any failure.
    """
    url = "https://forextrade-app.hf.space/api/message"  # Replace with your actual API URL
    payload = {
        "pair_name": pair_name,
        "message": message
    }
    try:
        response = requests.post(
            url,
            json=payload,
            headers={"Content-Type": "application/json"},
            timeout=30,
        )
        # Treat HTTP error statuses as failures, not silent successes.
        response.raise_for_status()
        return {"success": True, "response": response.json()}
    except requests.exceptions.RequestException as e:
        logger.error(f"API request error: {e}")
        return {"success": False, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error sending to API: {e}")
        return {"success": False, "error": str(e)}
71
 
72
def extract_message_content(analysis_text, pair_name):
    """Parse a single-pair AI analysis into a message payload.

    Recognises two tagged forms in *analysis_text*:
    ``<send_message>`` with nested message/reason/Prediction/Advice tags
    (a regular update), or ``<close_deal>`` (a close instruction).

    Args:
        analysis_text: Raw AI response text.
        pair_name: Pair the analysis refers to; embedded in the formatted
            Telegram message.

    Returns:
        dict with ``type`` ("regular" or "close"), the extracted fields and a
        ready-to-send ``full_message``; None when neither form is present or
        on error.
    """
    regular_pattern = (
        r'<send_message>\s*<message>(.*?)</message>\s*<reason>(.*?)</reason>'
        r'\s*<Prediction>(.*?)</Prediction>\s*<Advice>(.*?)</Advice>\s*</send_message>'
    )
    try:
        regular = re.search(regular_pattern, analysis_text, re.DOTALL)
        if regular:
            message, reason, prediction, advice = (g.strip() for g in regular.groups())
            return {
                "type": "regular",
                "message": message,
                "reason": reason,
                "prediction": prediction,
                "advice": advice,
                "full_message": f"🔹 {pair_name} 🔹\n\n💬 الرسالة: {message}\n\n📝 النصيحة: {advice}"
            }

        closed = re.search(r'<close_deal>(.*?)</close_deal>', analysis_text, re.DOTALL)
        if closed:
            close_reason = closed.group(1).strip()
            return {
                "type": "close",
                "reason": close_reason,
                "full_message": f"🔴 إغلاق صفقة 🔴\n\n🔹 {pair_name} 🔹\n\n⛔️ تم إغلاق الصفقة\n\n🧠 السبب: {close_reason}"
            }

        return None
    except Exception as e:
        logger.error(f"Error extracting message content: {e}")
        return None
 
 
108
 
109
def extract_all_messages(analysis_text):
    """Split a multi-pair AI analysis into per-pair message payloads.

    Looks for ``🔹 PAIR 🔹`` section headers; within each section extracts
    either the four regular fields (message/reason/prediction/advice, with
    Arabic labels) or a close-deal notice.

    Args:
        analysis_text: Raw AI response text.

    Returns:
        dict mapping pair name to a payload dict (same shape as
        ``extract_message_content``); None when no pair sections exist (the
        caller then falls back to the single-pair parser) or on error.
    """
    field_patterns = (
        ("message", r'💬 الرسالة: (.*?)(?=🧠 السبب:|$)'),
        ("reason", r'🧠 السبب: (.*?)(?=🔮 التوقع:|$)'),
        ("prediction", r'🔮 التوقع: (.*?)(?=📝 النصيحة:|$)'),
        ("advice", r'📝 النصيحة: (.*?)$'),
    )
    try:
        sections = re.findall(r'🔹 ([A-Z]+) 🔹(.*?)(?=🔹 [A-Z]+ 🔹|$)', analysis_text, re.DOTALL)
        if not sections:
            # No pair-specific sections found; signal fallback to the
            # legacy single-pair extraction.
            return None

        results = {}
        for pair, body in sections:
            fields = {}
            for name, pattern in field_patterns:
                found = re.search(pattern, body, re.DOTALL)
                if found:
                    fields[name] = found.group(1).strip()

            if len(fields) == len(field_patterns):
                # All four regular fields present: a normal update.
                results[pair] = {
                    "type": "regular",
                    "message": fields["message"],
                    "reason": fields["reason"],
                    "prediction": fields["prediction"],
                    "advice": fields["advice"],
                    "full_message": f"🔹 {pair} 🔹\n\n💬 الرسالة: {fields['message']}\n\n📝 النصيحة: {fields['advice']}"
                }
                continue

            closed = re.search(r'⛔️ تم إغلاق الصفقة\s*🧠 السبب: (.*?)$', body, re.DOTALL)
            if closed:
                close_reason = closed.group(1).strip()
                results[pair] = {
                    "type": "close",
                    "reason": close_reason,
                    "full_message": f"🔴 إغلاق صفقة 🔴\n\n🔹 {pair} 🔹\n\n⛔️ تم إغلاق الصفقة\n\n🧠 السبب: {close_reason}"
                }
            # Sections matching neither form are skipped, as before.

        return results
    except Exception as e:
        logger.error(f"Error extracting all messages: {e}")
        return None
155
 
156
def format_deal_details(signal):
    """Render an open-deal signal as a labelled, comma-separated Arabic summary.

    Args:
        signal: Signal dict; the keys pair/type/entry/stop_loss/take_profit/
            duration/reason are read via ``get`` (missing keys render as None).

    Returns:
        str: One-line summary handed to the AI as deal context.
    """
    labelled_fields = (
        ("زوج", "pair"),
        ("نوع", "type"),
        ("سعر الدخول", "entry"),
        ("وقف الخسارة", "stop_loss"),
        ("هدف الربح", "take_profit"),
        ("المدة", "duration"),
        ("السبب", "reason"),
    )
    return ", ".join(f"{label}: {signal.get(key)}" for label, key in labelled_fields)
167
 
168
def check_and_process_signals():
    """Check for signals and process them.

    One processing pass, run at each candle close: fetches the signals list
    from GitHub, asks the AI to analyze every signal in "starting" status,
    relays the resulting messages to Telegram and the external API, and
    writes the (possibly pruned) signal list back to GitHub.

    Side effects: network I/O (GitHub, Telegram, external API) and an
    in-place mutation of the fetched signals list. Returns nothing; all
    failures are logged rather than raised.
    """
    current_time = datetime.now()
    logger.info(f"Processing signals at candle close: {current_time.strftime('%Y-%m-%d %H:%M:%S')}")

    try:
        # Fetch the signals from GitHub.
        # NOTE(review): usage below implies the result dict carries
        # "success", "message" and "data" keys — confirm against db.signals.
        result = fetch_json_from_github()

        if not result["success"]:
            logger.error(f"Error fetching signals: {result['message']}")
            return

        signals = result["data"]
        updated = False
        # Indices (into `signals`) of entries to delete after the loop;
        # deferred so iteration order is not disturbed.
        signals_to_remove = []

        for i, signal in enumerate(signals):
            # Only process signals still in "starting" status.
            if signal.get("status") == "starting":
                # Older entries may lack a "pairs" group; fall back to a
                # one-element group containing the main pair.
                pairs = signal.get("pairs", [signal.get("pair")])
                main_pair = signal.get("pair")
                logger.info(f"Processing signal for main pair {main_pair} with group: {pairs}")

                try:
                    # Format the deal details for the AI prompt.
                    deal_details = format_deal_details(signal)

                    # Analyze the forex pairs using the updated function.
                    analysis = analyze_forex_pairs(pairs, deal_details)

                    # First try the multi-pair extraction (one section per pair).
                    all_messages = extract_all_messages(analysis)

                    if all_messages:
                        # Process each pair's message.
                        close_signal = False

                        for pair, message_data in all_messages.items():
                            # Send to Telegram.
                            telegram_result = send_telegram_message(message_data["full_message"])
                            if telegram_result.get("ok"):
                                logger.info(f"Telegram message sent for {pair}")
                            else:
                                logger.error(f"Failed to send Telegram message: {telegram_result}")

                            # Handle based on message type.
                            if message_data["type"] == "regular":
                                # Send to API.
                                api_result = send_to_api(pair, message_data["message"])
                                if api_result["success"]:
                                    logger.info(f"API message sent for {pair}")
                                else:
                                    logger.error(f"Failed to send to API: {api_result['error']}")
                            elif message_data["type"] == "close" and pair == main_pair:
                                # Only mark for removal if the MAIN pair has a
                                # close signal; closes on secondary pairs are
                                # reported but do not delete the entry.
                                close_signal = True

                        if close_signal:
                            signals_to_remove.append(i)
                            updated = True
                            logger.info(f"Signal for {main_pair} marked for removal due to close deal")
                    else:
                        # Fall back to the old single-pair extraction for
                        # backward compatibility with tag-style responses.
                        if "<send_message>" in analysis or "<close_deal>" in analysis:
                            message_data = extract_message_content(analysis, main_pair)

                            if message_data:
                                # Send to Telegram.
                                telegram_result = send_telegram_message(message_data["full_message"])
                                if telegram_result.get("ok"):
                                    logger.info(f"Telegram message sent for {main_pair}")
                                else:
                                    logger.error(f"Failed to send Telegram message: {telegram_result}")

                                # Handle based on message type.
                                if message_data["type"] == "regular":
                                    # Send to API.
                                    api_result = send_to_api(main_pair, message_data["message"])
                                    if api_result["success"]:
                                        logger.info(f"API message sent for {main_pair}")
                                    else:
                                        logger.error(f"Failed to send to API: {api_result['error']}")
                                elif message_data["type"] == "close":
                                    # Mark the signal for removal.
                                    signals_to_remove.append(i)
                                    updated = True
                                    logger.info(f"Signal for {main_pair} marked for removal due to close deal")
                            else:
                                logger.warning(f"Could not extract message content for {main_pair}")
                        else:
                            logger.info(f"No message to send for {main_pair}")
                except Exception as e:
                    logger.error(f"Error processing signal for {main_pair}: {e}", exc_info=True)
                    # Continue with other signals instead of breaking.

        # Remove signals marked for deletion (in reverse order so earlier
        # indices stay valid while popping).
        for index in sorted(signals_to_remove, reverse=True):
            signals.pop(index)

        # If any signals were removed, push the updated list back to GitHub.
        if updated:
            try:
                # Serialize compactly (single line, no spaces), matching the
                # stored file format.
                updated_content = json.dumps(signals, separators=(',', ':'))

                # Fetch the authenticity token and commit OID required for
                # the GitHub write.
                authenticity_token, commit_oid = fetch_authenticity_token_and_commit_oid()

                if authenticity_token and commit_oid:
                    # Update the GitHub file.
                    update_result = update_user_json_file(authenticity_token, commit_oid, updated_content)
                    if update_result["success"]:
                        logger.info("GitHub signals file updated successfully")
                    else:
                        logger.error(f"Failed to update GitHub file: {update_result['message']}")
                else:
                    logger.error("Failed to get authenticity token and commit OID")
            except Exception as e:
                logger.error(f"Error updating GitHub file: {e}", exc_info=True)
        else:
            logger.info("No signals needed updating")

    except Exception as e:
        logger.error(f"Unexpected error in check_and_process_signals: {e}", exc_info=True)
293
 
294
  # Flask routes
295
  @app.route('/')
 
297
  """Health check endpoint to verify the service is running"""
298
  return jsonify({
299
  "status": "running",
300
+ "message": "Forex Signal Processing System is active",
301
+ "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
302
  })
303
 
304
@app.route('/process/now')
def trigger_processing():
    """HTTP endpoint that manually triggers one signal-processing pass.

    Returns a JSON success payload (with a timestamp) when the pass
    completes, or a 500 JSON error payload when it raises.
    """
    try:
        check_and_process_signals()
        success_payload = {
            "status": "success",
            "message": "Signal processing triggered successfully",
            "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        }
        return jsonify(success_payload)
    except Exception as e:
        logger.error(f"Error triggering signal processing: {e}", exc_info=True)
        error_payload = {
            "status": "error",
            "message": f"Error triggering signal processing: {str(e)}"
        }
        return jsonify(error_payload), 500
320
 
321
+ # Initialize scheduler
322
+ scheduler = BackgroundScheduler(daemon=True)
323
+
324
def add_candle_close_jobs():
    """Register the recurring candle-close job plus a one-shot startup job.

    Adds a cron job that fires at every 5-minute boundary, logs when the
    next candle close will be processed, and schedules a single run shortly
    after startup so a restart does not wait for the next boundary.
    """
    logger.info("Setting up scheduler jobs for candle closes")

    # Recurring job: fire at each 5-minute mark (00, 05, 10, ... 55).
    scheduler.add_job(
        check_and_process_signals,
        CronTrigger(minute='0,5,10,15,20,25,30,35,40,45,50,55'),
        id='candle_close_processing',
        replace_existing=True,
    )

    # Work out the next 5-minute boundary purely for the log line below —
    # the cron trigger above does the actual scheduling.
    now = datetime.now()
    upcoming_mark = ((now.minute // 5) + 1) * 5 % 60  # wraps 60 -> 0
    # Aim 5 seconds past the minute mark so the candle has fully closed.
    next_run = now.replace(minute=upcoming_mark, second=5, microsecond=0)
    if next_run <= now:
        # Wrapped past the top of the hour; push to the next hour's 00:05.
        next_run = next_run + timedelta(hours=1)

    seconds_away = (next_run - now).total_seconds()
    logger.info(f"Next candle close processing scheduled for: {next_run.strftime('%Y-%m-%d %H:%M:%S')} "
                f"(in {seconds_away:.2f} seconds)")

    # One-shot job: run once, 10 seconds after startup.
    scheduler.add_job(
        check_and_process_signals,
        'date',
        run_date=datetime.now() + timedelta(seconds=10),
        id='initial_check',
        replace_existing=True
    )
361
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
362
  def start_scheduler():
363
+ """Start the scheduler with the candle close processing jobs"""
364
+ logger.info("Starting scheduler for candle close signal processing")
365
 
366
+ # Add the candle close jobs
367
+ add_candle_close_jobs()
368
 
369
  # Start the scheduler if it's not already running
370
  if not scheduler.running:
 
372
  logger.info("Scheduler started successfully")
373
 
374
  if __name__ == "__main__":
375
+ logger.info("Starting Forex Signal Processing System...")
376
 
377
  # Start the scheduler
378
  start_scheduler()