Upload 5 files
- db/__pycache__/deeper.cpython-310.pyc +0 -0
- db/__pycache__/paires.cpython-310.pyc +0 -0
- db/__pycache__/signals.cpython-310.pyc +0 -0
- db/deeper.py +125 -0
- db/signals.py +125 -0
db/__pycache__/deeper.cpython-310.pyc
ADDED
Binary file (4.66 kB)
db/__pycache__/paires.cpython-310.pyc
ADDED
Binary file (4.66 kB)
db/__pycache__/signals.cpython-310.pyc
ADDED
Binary file (4.68 kB)
db/deeper.py
ADDED
@@ -0,0 +1,125 @@
import requests
from bs4 import BeautifulSoup
import json

# GitHub session cookie string; sent verbatim as the Cookie header on every request below.
TOKEN = "_octo=GH1.1.1509769180.1721774314; _device_id=0038e28d4f7d4f9baf8f76b6b9fb8980; GHCC=Required:1-Analytics:1-SocialMedia:1-Advertising:1; MicrosoftApplicationsTelemetryDeviceId=c58113b4-9acb-4ba8-b9f2-4217bdef379a; MSFPC=GUID=79b87b010d464a8783fbf43e19eccddf&HASH=79b8&LV=202408&V=4&LU=1723654762596; saved_user_sessions=155741452%3A7N0TQ967Rag6I8wQT1c4Ant_3hsfC8D1gDXrucvw0JA1OLuh; user_session=7N0TQ967Rag6I8wQT1c4Ant_3hsfC8D1gDXrucvw0JA1OLuh; __Host-user_session_same_site=7N0TQ967Rag6I8wQT1c4Ant_3hsfC8D1gDXrucvw0JA1OLuh; logged_in=yes; dotcom_user=omarnuwrar; color_mode=%7B%22color_mode%22%3A%22auto%22%2C%22light_theme%22%3A%7B%22name%22%3A%22light%22%2C%22color_mode%22%3A%22light%22%7D%2C%22dark_theme%22%3A%7B%22name%22%3A%22dark%22%2C%22color_mode%22%3A%22dark%22%7D%7D; cpu_bucket=lg; preferred_color_mode=dark; tz=Africa%2FTripoli; _gh_sess=oyETeSUzPODINtXA9muci3RN0%2ByPh4et1k09FFSQadtrfdlJdLN4Hb77yeSq2QBSaTLHSC6UnJX5l3e%2BJ26mupuP%2BAjf5oHGSk6ptSkvNns5iPHx0ZgLFB5YGVPRqqnSu1F1PMuwuIKY5Km%2Fiw6wYAyou2WoRrllHvaW%2B%2FTYZDl%2BbBi3LEV4mCS9VDOQzseblH2j%2FqYRiGvok2aTj3AXTNnK0HQJ6O5bgCgCNtQQR8Q43G6if6NgQYA3RWvtJ1oOtxMvHKRSxHpCNgtzFMl%2F%2FuSgRwUdofsxuDT9IM076n7LMbNyO6lUY1Q5ww9oCxV0trPauDBc%2FsNa7lcqDP8RVOLZL2gmCXYXFwRWz%2B8%2B0S9gMptip%2FpZMd4%2BGZcK9k%2F0hswDEkuGFzm3r39Bw8nxCBQc6Yk%3D--VHpy7nUbR8UpQV2t--SkGKj5TPJ1F08U0anq2dfg%3D%3D"

# Step 1: Fetch the authenticity_token and commitOid from the GitHub edit page
def fetch_authenticity_token_and_commit_oid():
    url = "https://github.com/omarnuwrar/Trading/edit/main/deeper.json"

    headers = {
        "cookie": TOKEN,
        "if-none-match": 'W/"2ff86bd1792cfee5ed79ee070b3b46de"',
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "x-github-target": "dotcom",
        "x-react-router": "json",
        "x-requested-with": "XMLHttpRequest",
    }

    response = requests.get(url, headers=headers)

    if response.status_code == 200:
        soup = BeautifulSoup(response.text, 'html.parser')
        script_tag = soup.find("script", {"type": "application/json", "data-target": "react-app.embeddedData"})

        if script_tag:
            try:
                json_data = json.loads(script_tag.string.strip())
                authenticity_token = json_data["payload"]["csrf_tokens"]["/omarnuwrar/Trading/tree-save/main/deeper.json"]["post"]
                commit_oid = json_data["payload"]["webCommitInfo"]["commitOid"]
                return authenticity_token, commit_oid
            except (KeyError, json.JSONDecodeError) as e:
                print(f"Error: Failed to extract data. Details: {str(e)}")
                return None, None
        else:
            print("Error: Could not find the required <script> tag.")
            return None, None
    else:
        print(f"Error: Failed to fetch the page. Status code: {response.status_code}")
        return None, None

# Step 2: Send the POST request to update the deeper.json file
def update_user_json_file(authenticity_token, commit_oid, new_content):
    url = "https://github.com/omarnuwrar/Trading/tree-save/main/deeper.json"

    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "x-requested-with": "XMLHttpRequest",
        "github-verified-fetch": "true",
        "content-type": "application/x-www-form-urlencoded",
        "cookie": TOKEN,
    }

    payload = {
        "message": "Update deeper.json",
        "placeholder_message": "Update deeper.json",
        "description": "",
        "commit-choice": "direct",
        "target_branch": "main",
        "quick_pull": "",
        "guidance_task": "",
        "commit": commit_oid,
        "same_repo": "1",
        "pr": "",
        "content_changed": "true",
        "filename": "deeper.json",
        "new_filename": "deeper.json",
        "value": new_content,
        "authenticity_token": authenticity_token,
    }

    response = requests.post(url, headers=headers, data=payload)

    if response.status_code == 200:
        return {"success": True, "message": "deeper.json has been updated!"}
    else:
        return {"success": False, "message": f"Request failed with status code {response.status_code}", "details": response.text}


# Function to fetch and extract the JSON data
def fetch_json_from_github():
    # URL of the GitHub page
    url = "https://github.com/omarnuwrar/Trading/blob/main/deeper.json"

    # Custom headers
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "Cookie": TOKEN
    }

    try:
        # Fetch the HTML content of the page
        response = requests.get(url, headers=headers)
        response.raise_for_status()  # Raise an exception for HTTP errors

        # Parse the HTML using BeautifulSoup
        soup = BeautifulSoup(response.text, 'html.parser')

        # Find the <script> tag with type="application/json" and data-target="react-app.embeddedData"
        script_tag = soup.find('script', {'type': 'application/json', 'data-target': 'react-app.embeddedData'})
        if script_tag:
            # Load the JSON content from the <script> tag
            embedded_data = json.loads(script_tag.string)

            # Navigate to the "blob" > "rawLines" key for the JSON in the file
            raw_lines = embedded_data.get("payload", {}).get("blob", {}).get("rawLines", [])
            if raw_lines:
                # The JSON content is in the first element of the rawLines list
                json_content = raw_lines[0]

                # Parse the JSON content
                data = json.loads(json_content)

                # Return the extracted JSON data
                return {"success": True, "data": data}
            else:
                return {"success": False, "message": "JSON data not found in the 'rawLines' key."}
        else:
            return {"success": False, "message": "Could not find the <script> tag with embedded JSON data."}
    except requests.exceptions.RequestException as e:
        return {"success": False, "message": f"Error fetching data: {e}"}
    except json.JSONDecodeError as je:
        return {"success": False, "message": f"Error parsing JSON: {je}"}
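
A minimal usage sketch for db/deeper.py (not part of this commit): it chains the three helpers in the order the comments describe. The `__main__` guard and the {"example": True} payload are invented placeholders, and the sketch assumes the hard-coded session cookie is still valid.

if __name__ == "__main__":
    # Grab the CSRF token and current commit OID from the edit page.
    token, commit_oid = fetch_authenticity_token_and_commit_oid()
    if token and commit_oid:
        # Hypothetical new file contents; the real caller supplies its own JSON string.
        result = update_user_json_file(token, commit_oid, json.dumps({"example": True}))
        print(result)
    # Read the file back through the blob page.
    print(fetch_json_from_github())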
db/signals.py
ADDED
@@ -0,0 +1,125 @@
import requests
from bs4 import BeautifulSoup
import json

# GitHub session cookie string; sent verbatim as the Cookie header on every request below.
TOKEN = "_octo=GH1.1.1509769180.1721774314; _device_id=0038e28d4f7d4f9baf8f76b6b9fb8980; GHCC=Required:1-Analytics:1-SocialMedia:1-Advertising:1; MicrosoftApplicationsTelemetryDeviceId=c58113b4-9acb-4ba8-b9f2-4217bdef379a; MSFPC=GUID=79b87b010d464a8783fbf43e19eccddf&HASH=79b8&LV=202408&V=4&LU=1723654762596; saved_user_sessions=155741452%3A7N0TQ967Rag6I8wQT1c4Ant_3hsfC8D1gDXrucvw0JA1OLuh; user_session=7N0TQ967Rag6I8wQT1c4Ant_3hsfC8D1gDXrucvw0JA1OLuh; __Host-user_session_same_site=7N0TQ967Rag6I8wQT1c4Ant_3hsfC8D1gDXrucvw0JA1OLuh; logged_in=yes; dotcom_user=omarnuwrar; color_mode=%7B%22color_mode%22%3A%22auto%22%2C%22light_theme%22%3A%7B%22name%22%3A%22light%22%2C%22color_mode%22%3A%22light%22%7D%2C%22dark_theme%22%3A%7B%22name%22%3A%22dark%22%2C%22color_mode%22%3A%22dark%22%7D%7D; cpu_bucket=lg; preferred_color_mode=dark; tz=Africa%2FTripoli; _gh_sess=oyETeSUzPODINtXA9muci3RN0%2ByPh4et1k09FFSQadtrfdlJdLN4Hb77yeSq2QBSaTLHSC6UnJX5l3e%2BJ26mupuP%2BAjf5oHGSk6ptSkvNns5iPHx0ZgLFB5YGVPRqqnSu1F1PMuwuIKY5Km%2Fiw6wYAyou2WoRrllHvaW%2B%2FTYZDl%2BbBi3LEV4mCS9VDOQzseblH2j%2FqYRiGvok2aTj3AXTNnK0HQJ6O5bgCgCNtQQR8Q43G6if6NgQYA3RWvtJ1oOtxMvHKRSxHpCNgtzFMl%2F%2FuSgRwUdofsxuDT9IM076n7LMbNyO6lUY1Q5ww9oCxV0trPauDBc%2FsNa7lcqDP8RVOLZL2gmCXYXFwRWz%2B8%2B0S9gMptip%2FpZMd4%2BGZcK9k%2F0hswDEkuGFzm3r39Bw8nxCBQc6Yk%3D--VHpy7nUbR8UpQV2t--SkGKj5TPJ1F08U0anq2dfg%3D%3D"

# Step 1: Fetch the authenticity_token and commitOid from the GitHub edit page
def fetch_authenticity_token_and_commit_oid():
    url = "https://github.com/omarnuwrar/Trading/edit/main/signals.json"

    headers = {
        "cookie": TOKEN,
        "if-none-match": 'W/"2ff86bd1792cfee5ed79ee070b3b46de"',
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "x-github-target": "dotcom",
        "x-react-router": "json",
        "x-requested-with": "XMLHttpRequest",
    }

    response = requests.get(url, headers=headers)

    if response.status_code == 200:
        soup = BeautifulSoup(response.text, 'html.parser')
        script_tag = soup.find("script", {"type": "application/json", "data-target": "react-app.embeddedData"})

        if script_tag:
            try:
                json_data = json.loads(script_tag.string.strip())
                authenticity_token = json_data["payload"]["csrf_tokens"]["/omarnuwrar/Trading/tree-save/main/signals.json"]["post"]
                commit_oid = json_data["payload"]["webCommitInfo"]["commitOid"]
                return authenticity_token, commit_oid
            except (KeyError, json.JSONDecodeError) as e:
                print(f"Error: Failed to extract data. Details: {str(e)}")
                return None, None
        else:
            print("Error: Could not find the required <script> tag.")
            return None, None
    else:
        print(f"Error: Failed to fetch the page. Status code: {response.status_code}")
        return None, None

# Step 2: Send the POST request to update the signals.json file
def update_user_json_file(authenticity_token, commit_oid, new_content):
    url = "https://github.com/omarnuwrar/Trading/tree-save/main/signals.json"

    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "x-requested-with": "XMLHttpRequest",
        "github-verified-fetch": "true",
        "content-type": "application/x-www-form-urlencoded",
        "cookie": TOKEN,
    }

    payload = {
        "message": "Update signals.json",
        "placeholder_message": "Update signals.json",
        "description": "",
        "commit-choice": "direct",
        "target_branch": "main",
        "quick_pull": "",
        "guidance_task": "",
        "commit": commit_oid,
        "same_repo": "1",
        "pr": "",
        "content_changed": "true",
        "filename": "signals.json",
        "new_filename": "signals.json",
        "value": new_content,
        "authenticity_token": authenticity_token,
    }

    response = requests.post(url, headers=headers, data=payload)

    if response.status_code == 200:
        return {"success": True, "message": "signals.json has been updated!"}
    else:
        return {"success": False, "message": f"Request failed with status code {response.status_code}", "details": response.text}


# Function to fetch and extract the JSON data
def fetch_json_from_github():
    # URL of the GitHub page
    url = "https://github.com/omarnuwrar/Trading/blob/main/signals.json"

    # Custom headers
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "Cookie": TOKEN
    }

    try:
        # Fetch the HTML content of the page
        response = requests.get(url, headers=headers)
        response.raise_for_status()  # Raise an exception for HTTP errors

        # Parse the HTML using BeautifulSoup
        soup = BeautifulSoup(response.text, 'html.parser')

        # Find the <script> tag with type="application/json" and data-target="react-app.embeddedData"
        script_tag = soup.find('script', {'type': 'application/json', 'data-target': 'react-app.embeddedData'})
        if script_tag:
            # Load the JSON content from the <script> tag
            embedded_data = json.loads(script_tag.string)

            # Navigate to the "blob" > "rawLines" key for the JSON in the file
            raw_lines = embedded_data.get("payload", {}).get("blob", {}).get("rawLines", [])
            if raw_lines:
                # The JSON content is in the first element of the rawLines list
                json_content = raw_lines[0]

                # Parse the JSON content
                data = json.loads(json_content)

                # Return the extracted JSON data
                return {"success": True, "data": data}
            else:
                return {"success": False, "message": "JSON data not found in the 'rawLines' key."}
        else:
            return {"success": False, "message": "Could not find the <script> tag with embedded JSON data."}
    except requests.exceptions.RequestException as e:
        return {"success": False, "message": f"Error fetching data: {e}"}
    except json.JSONDecodeError as je:
        return {"success": False, "message": f"Error parsing JSON: {je}"}
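
As with db/deeper.py, a short hedged sketch of how db/signals.py might be exercised end to end (not part of this commit). The {"signal": "example signal"} payload is an invented placeholder; the module differs from deeper.py only in targeting signals.json.

if __name__ == "__main__":
    token, commit_oid = fetch_authenticity_token_and_commit_oid()
    if token and commit_oid:
        # Placeholder payload; the real caller supplies the actual signals JSON.
        outcome = update_user_json_file(token, commit_oid, json.dumps({"signal": "example signal"}))
        print(outcome["message"])
    # Read signals.json back and report the result dict returned by the helper.
    readback = fetch_json_from_github()
    print(readback["data"] if readback["success"] else readback["message"])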