# Spaces:
# Sleeping
# Sleeping
import requests
from bs4 import BeautifulSoup
import json
# SECURITY(review): this is a live GitHub session cookie (user_session, _gh_sess,
# dotcom_user) hard-coded in source. Anyone who reads this file can act as the
# account. The credential should be revoked and loaded from an environment
# variable or secret store instead of being committed here.
TOKEN = "_octo=GH1.1.1509769180.1721774314; _device_id=0038e28d4f7d4f9baf8f76b6b9fb8980; GHCC=Required:1-Analytics:1-SocialMedia:1-Advertising:1; MicrosoftApplicationsTelemetryDeviceId=c58113b4-9acb-4ba8-b9f2-4217bdef379a; MSFPC=GUID=79b87b010d464a8783fbf43e19eccddf&HASH=79b8&LV=202408&V=4&LU=1723654762596; saved_user_sessions=155741452%3A7N0TQ967Rag6I8wQT1c4Ant_3hsfC8D1gDXrucvw0JA1OLuh; user_session=7N0TQ967Rag6I8wQT1c4Ant_3hsfC8D1gDXrucvw0JA1OLuh; __Host-user_session_same_site=7N0TQ967Rag6I8wQT1c4Ant_3hsfC8D1gDXrucvw0JA1OLuh; logged_in=yes; dotcom_user=omarnuwrar; color_mode=%7B%22color_mode%22%3A%22auto%22%2C%22light_theme%22%3A%7B%22name%22%3A%22light%22%2C%22color_mode%22%3A%22light%22%7D%2C%22dark_theme%22%3A%7B%22name%22%3A%22dark%22%2C%22color_mode%22%3A%22dark%22%7D%7D; cpu_bucket=lg; preferred_color_mode=dark; tz=Africa%2FTripoli; _gh_sess=oyETeSUzPODINtXA9muci3RN0%2ByPh4et1k09FFSQadtrfdlJdLN4Hb77yeSq2QBSaTLHSC6UnJX5l3e%2BJ26mupuP%2BAjf5oHGSk6ptSkvNns5iPHx0ZgLFB5YGVPRqqnSu1F1PMuwuIKY5Km%2Fiw6wYAyou2WoRrllHvaW%2B%2FTYZDl%2BbBi3LEV4mCS9VDOQzseblH2j%2FqYRiGvok2aTj3AXTNnK0HQJ6O5bgCgCNtQQR8Q43G6if6NgQYA3RWvtJ1oOtxMvHKRSxHpCNgtzFMl%2F%2FuSgRwUdofsxuDT9IM076n7LMbNyO6lUY1Q5ww9oCxV0trPauDBc%2FsNa7lcqDP8RVOLZL2gmCXYXFwRWz%2B8%2B0S9gMptip%2FpZMd4%2BGZcK9k%2F0hswDEkuGFzm3r39Bw8nxCBQc6Yk%3D--VHpy7nUbR8UpQV2t--SkGKj5TPJ1F08U0anq2dfg%3D%3D"
# Step 1: Fetch the authenticity_token and commitOid from the GitHub edit page
def fetch_authenticity_token_and_commit_oid():
    """Scrape GitHub's web editor page for the CSRF token and HEAD commit OID.

    The edit page embeds a React payload in a <script type="application/json">
    tag; the token and commit OID needed for the subsequent tree-save POST are
    read from that payload.

    Returns:
        tuple: ``(authenticity_token, commit_oid)`` on success, or
        ``(None, None)`` on any failure (non-200 response, missing/empty
        <script> tag, or missing payload keys). Errors are printed to stdout.
    """
    url = "https://github.com/omarnuwrar/Trading/edit/main/deeper.json"
    headers = {
        "cookie": TOKEN,
        # NOTE: the original code sent a hard-coded `if-none-match` ETag here.
        # A matching ETag makes the server answer 304 Not Modified with an
        # empty body, which this function would misreport as a failure, so the
        # header is intentionally omitted to always receive a full 200 response.
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "x-github-target": "dotcom",
        "x-react-router": "json",
        "x-requested-with": "XMLHttpRequest",
    }
    # Timeout prevents the call from hanging indefinitely on a stalled connection.
    response = requests.get(url, headers=headers, timeout=30)
    if response.status_code != 200:
        print(f"Error: Failed to fetch the page. Status code: {response.status_code}")
        return None, None
    soup = BeautifulSoup(response.text, 'html.parser')
    script_tag = soup.find("script", {"type": "application/json", "data-target": "react-app.embeddedData"})
    # Guard `.string` too: on an empty tag it is None and `.strip()` would
    # raise AttributeError (previously uncaught).
    if script_tag is None or not script_tag.string:
        print("Error: Could not find the required <script> tag.")
        return None, None
    try:
        json_data = json.loads(script_tag.string.strip())
        authenticity_token = json_data["payload"]["csrf_tokens"]["/omarnuwrar/Trading/tree-save/main/deeper.json"]["post"]
        commit_oid = json_data["payload"]["webCommitInfo"]["commitOid"]
        return authenticity_token, commit_oid
    except (KeyError, json.JSONDecodeError) as e:
        print(f"Error: Failed to extract data. Details: {str(e)}")
        return None, None
# Step 2: Send the POST request to update the deeper.json file
def update_user_json_file(authenticity_token, commit_oid, new_content):
    """Commit *new_content* to deeper.json on main via GitHub's tree-save endpoint.

    Args:
        authenticity_token: CSRF token scraped from the edit page.
        commit_oid: OID of the commit the edit is based on (GitHub uses it as
            an optimistic lock against concurrent edits).
        new_content: Full replacement text for the file.

    Returns:
        dict: ``{"success": True, "message": ...}`` on HTTP 200, otherwise
        ``{"success": False, "message": ..., "details": <response body>}``.
    """
    url = "https://github.com/omarnuwrar/Trading/tree-save/main/deeper.json"
    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "x-requested-with": "XMLHttpRequest",
        "github-verified-fetch": "true",
        "content-type": "application/x-www-form-urlencoded",
        "cookie": TOKEN,
    }
    # Field names mirror GitHub's web editor form: a direct commit to the
    # same repo/branch, no pull request, same filename before and after.
    payload = {
        "message": "Update deeper.json",
        "placeholder_message": "Update deeper.json",
        "description": "",
        "commit-choice": "direct",
        "target_branch": "main",
        "quick_pull": "",
        "guidance_task": "",
        "commit": commit_oid,
        "same_repo": "1",
        "pr": "",
        "content_changed": "true",
        "filename": "deeper.json",
        "new_filename": "deeper.json",
        "value": new_content,
        "authenticity_token": authenticity_token,
    }
    # Timeout keeps a stalled connection from hanging the caller indefinitely.
    response = requests.post(url, headers=headers, data=payload, timeout=30)
    if response.status_code == 200:
        return {"success": True, "message": "deeper.json has been updated!"}
    return {"success": False, "message": f"Request failed with status code {response.status_code}", "details": response.text}
# Function to fetch and extract the JSON data
def fetch_json_from_github():
    """Download deeper.json from the repo's blob page and parse its contents.

    GitHub embeds the file's text as a list of lines under
    ``payload.blob.rawLines`` inside the react-app.embeddedData <script> tag.

    Returns:
        dict: ``{"success": True, "data": <parsed JSON>}`` on success, or
        ``{"success": False, "message": <reason>}`` on any failure.
    """
    # URL of the GitHub page
    url = "https://github.com/omarnuwrar/Trading/blob/main/deeper.json"
    # Custom headers
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "Cookie": TOKEN
    }
    try:
        # Fetch the HTML content of the page; timeout avoids hanging forever.
        response = requests.get(url, headers=headers, timeout=30)
        response.raise_for_status()  # Raise an exception for HTTP errors
        # Parse the HTML using BeautifulSoup
        soup = BeautifulSoup(response.text, 'html.parser')
        # Find the <script> tag with type="application/json" and data-target="react-app.embeddedData".
        # Also guard `.string`: it is None for an empty tag.
        script_tag = soup.find('script', {'type': 'application/json', 'data-target': 'react-app.embeddedData'})
        if not script_tag or not script_tag.string:
            return {"success": False, "message": "Could not find the <script> tag with embedded JSON data."}
        # Load the JSON content from the <script> tag
        embedded_data = json.loads(script_tag.string)
        # Navigate to the "blob" > "rawLines" key for the JSON in the file
        raw_lines = embedded_data.get("payload", {}).get("blob", {}).get("rawLines", [])
        if not raw_lines:
            return {"success": False, "message": "JSON data not found in the 'rawLines' key."}
        # BUGFIX: the previous code parsed only rawLines[0], which fails for any
        # pretty-printed (multi-line) JSON file. Joining all lines handles both
        # single-line and multi-line files; the single-line case is unchanged.
        data = json.loads("\n".join(raw_lines))
        # Return the extracted JSON data
        return {"success": True, "data": data}
    except requests.exceptions.RequestException as e:
        return {"success": False, "message": f"Error fetching data: {e}"}
    except json.JSONDecodeError as je:
        return {"success": False, "message": f"Error parsing JSON: {je}"}