code | package | path | filename
---|---|---|---|
import smtplib, ssl
import json
import string
import random
import os
from twilio.rest import Client
import time
from termcolor import colored
from pyfiglet import figlet_format
import configparser
import colorama
import re
colorama.init()
names = ["users", "posts"]
for name in names:
    # Ensure the data files exist before the classes below read them
    open(f"{name}.json", "a+").close()
class Alerts:
colorama.init()
with open("posts.json", "r+") as post:
if os.path.getsize("posts.json") != 0:
JOB_POSTS = json.load(post) # To make sure posts from previous users are not overwritten
else:
JOB_POSTS = {}
JOB_POSTS["alerts"] =[] # If there are no posts, then this would instantiated
def __init__(self, title, body, location, company, user):
self._title = title
self._body = body
self._location = location
self._company = company
self._user = user
self._store_alert()
def _store_alert(self):
user_alert = {}
user_alert["title"] = self._title
user_alert["body"] = self._body
user_alert["user"] = self._user
user_alert["company"] = self._company
user_alert["location"] = self._location
Alerts.JOB_POSTS["alerts"].append(user_alert)
with open("posts.json", "w") as posts:
json.dump(Alerts.JOB_POSTS, posts)
print(colored("Alert noted, thank you", "red"))
def get_alert(self):
alerts = []
print("\t\t\t\tFind an alert")
keyword = input("What is the keyword\n> ").lower()
if os.path.getsize("posts.json") == 0:
print(colored("\t\t\tNo post exists", "red"))
else:
with open("posts.json", "r") as posts:
posts = json.load(posts)
for alert in posts["alerts"]:
alert_values = " ".join(list(alert.values())).lower()
if alert_values.find(keyword) != -1:
alerts.append(alert)
if len(alerts) != 0:
for alert in alerts:
for key, values in alert.items():
print(f"\t\t{key}: {values}")
time.sleep(0.3)
print()
else:
print(colored("None exists", "red"))
def get_last_post(self):
with open("posts.json", "r") as posts:
posts = json.load(posts)
last_alert = posts["alerts"][-1]
return last_alert
    def show_all_alerts(self):
        """Display all fake job alerts"""
if os.path.getsize("posts.json") == 0:
print(colored("\t\t\tNothing here to see", "red"))
else:
with open("posts.json", "r") as posts:
posts = json.load(posts)
print("All Recent Posts")
for alert in posts["alerts"]:
for key, value in alert.items():
print(f"\t\t{key}: {value}")
print()
time.sleep(0.5)
class Notifications:
    """
    Collects the email addresses and phone numbers of all registered users
    and sends each of them an email and an SMS whenever a new post is created.
    """
def __init__(self):
"""Initialises the Email host paramters and reciepients"""
self._config = configparser.ConfigParser()
self._recipients = []
self._host = "smtp.gmail.com"
self._port = "587"
self._config.read("test.ini")
self._host_user = self._config["default"]["email_host_user"]
self._host_pass = self._config["default"]["email_host_password"]
self._phone = self._config["default"]["twilio_phone"]
self._account_sid = self._config["default"]["twilio_sid"]
self._twilio_token = self._config["default"]["twilio_token"]
def get_numbers_or_emails(self, name):
"""A function to populate the recipient's list with either emails or phone numbers"""
with open("users.json", "r") as users:
users = json.load(users)
for value in users["account"]:
self._recipients.append(value[name]) # use either phone or email
def send_email(self):
"""Send email to each registered user"""
        recent_alert = Alerts.get_last_post(self)  # only reads posts.json, so calling it through a Notifications instance works
self.get_numbers_or_emails("email")
context = ssl.create_default_context()
with smtplib.SMTP(self._host, self._port) as server:
server.starttls(context=context)
server.ehlo()
server.login(self._host_user, self._host_pass)
message = f'''
Hey,
There is a new update on the fake job alerts
NEW ALERTS
{recent_alert["title"].upper()}
{recent_alert["body"]}
written by {recent_alert["user"]}
'''
for email in self._recipients:
                server.sendmail(self._host_user, email, message)
def send_sms(self):
"""Send sms to each registered user"""
client = Client(self._account_sid, self._twilio_token)
self.get_numbers_or_emails("phone")
        for phone in self._recipients:
            phone = f"+234{phone}"  # prepend Nigeria's country code
            client.messages.create(to=phone, from_=self._phone, body="Hey, there is a new alert, log in to see")
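# --- Illustrative test.ini (a sketch; every value below is a placeholder) ---
# Notifications.__init__ expects a config file with a [default] section
# holding the Gmail and Twilio credentials it reads:
#
#     [default]
#     email_host_user = [email protected]
#     email_host_password = an-app-password
#     twilio_phone = +15551234567
#     twilio_sid = ACxxxxxxxxxxxxxxxx
#     twilio_token = xxxxxxxxxxxxxxxx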
class User:
    """A user class to simulate the user"""
    colorama.init()
with open("users.json") as user:
if os.path.getsize("users.json") != 0:
USER_DETAILS = json.load(user) # To make sure user details are not overwritten
else:
USER_DETAILS = {}
USER_DETAILS["account"] =[]
def __init__(self):
"""Initialize the staff details to none. Useful for session"""
self._username = None
self._password = None
self._logged_in = False
print(colored(figlet_format("ALERTS FORUM", font="slant"), "green"))
print(colored("\t\t\t\tWelcome to Forward4-Alerts", "yellow"))
self._start()
def _start(self):
"""Start the simulation"""
print( colored("""
1. Register
2. Log in
3. Close App
""", "blue"))
response = ""
while response == "" or response > 3:
try:
response = int(input("\n> "))
except:
print("Value should be a number")
else:
if response == 1:
self._register_user()
elif response == 2:
self.log_in_user()
else:
self.close_app()
def _register_user(self):
print(""" Create a new account""")
details = {}
details["first_name"] = input("First name (!important)\n> ")
details["last_name"] = input("Last name\n> ")
details["username"] = input("Choose a username (!important)\n> ")
        valid = False
        while not valid:
            details["email"] = input("Email Address\n> ")
            # [A-Za-z] rather than [A-z], which would also match the punctuation between Z and a
            match = re.match(r"^[_A-Za-z0-9-]+(\.[_A-Za-z0-9-]+)*@[A-Za-z0-9-]+(\.[A-Za-z0-9-]+)*(\.[A-Za-z]{2,4})$", details["email"])
            if match is not None:
                valid = True
            else:
                print("Invalid email address")
        valid = False
        while not valid:
            details["phone"] = input("Phone\n> ")
            try:
                int(details["phone"])  # digits only; the stored value stays a string
                valid = True
            except ValueError:
                print("Enter a correct phone number")
if details["username"] == "" or details["first_name"] == "":
print("You can't leave important fields empty")
self._register_user()
else:
details["password"] = self._password_generator(details["first_name"], details["last_name"])
User.USER_DETAILS["account"].append(details)
with open("users.json", "w") as customer:
json.dump(User.USER_DETAILS, customer)
print("Account creation was successful, Please log in")
self.log_in_user()
    def _password_generator(self, first_name, last_name):
        """Generates a password for each user upon registration"""
        all_string = string.ascii_letters  # already contains both lower- and uppercase letters
rand = "".join(random.choice(all_string) for i in range(5))
password = first_name[:4] + rand + last_name[-3:]
print(colored(f"""
This is your generated password. Do well to copy
{password}
""", "red"))
return password
def log_in_user(self):
"""Authenticate and logs in the user """
print("\tPlease log in to your account")
tries = 3
print("Please input the correct details")
if os.path.getsize("users.json") == 0:
print(colored("No user exists, please create an account", "red"))
self._register_user()
else:
while not self._logged_in and tries > 0:
print(f"Note: you have {colored(tries, 'red')} tries")
username = input("What is your username ").lower()
password = input("Please input your password ")
with open("users.json", "r") as user_account:
staff = json.load(user_account)
for user in staff["account"]:
if username == user["username"].lower() and password == user["password"]:
self._username = username
self._password = password
self._logged_in = True
print("Login successful")
break
else:
self._logged_in = False
if not self._logged_in:
print("Wrong details")
tries -= 1
else:
if tries == 0 and not self._logged_in:
print("\t\tPlease create a new account as you can't access this")
self._start()
else:
self._create_session(action="logged in")
self._show_account_settings()
def _show_account_settings(self):
""" Actions to be perform on the account"""
print(colored(f"""\t\t\t\t Welcome, {self._username}
1. Create a new post
2. Search alerts
3. Show all alerts
4. Logout
""", "blue"))
response = ""
while response == "" or response > 4:
try:
response = int(input("\n> "))
except:
print("Value should be a number between 1 and 4")
else:
if response == 1:
self._create_post()
elif response == 2:
self.get_posts()
elif response == 3:
self.display_all_posts()
else:
self.logout()
def display_all_posts(self):
Alerts.show_all_alerts(self)
print("What else do you want to do? ")
self._show_account_settings()
def logout(self):
"""Logs out user and deletes session files"""
os.remove(f"{self._username}.txt")
self._username, self._password, self._logged_in = None, None, False
self._start()
def get_posts(self):
""""Search out instances of a particualar post"""
Alerts.get_alert(self)
self._show_account_settings()
def _create_session(self, action=None):
"""This creates a session as the staff performs an action"""
with open(f"{self._username}.txt", "a") as session:
session.write(f"{self._username} {action} \n")
    def _create_post(self):
        """Create a new alert and trigger the notification modules"""
        colorama.init()
print("\t\t\t\tPost an alert")
title = input("Subject\n> ")
body = input("Message\n> ")
location = input("Location\n> ")
company = input("Company's Details\n> ")
Alerts(title=title, body=body, location=location, company=company, user=self._username)
try:
p = Notifications()
p.send_sms()
p.send_email()
        except Exception as e:
            print(colored(f"Error sending alerts: {e}", "red"))
self._show_account_settings()
def close_app(self):
"""Close app"""
print("Bye, bye. Come back next time")
return 0
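# --- Illustrative entry point (a sketch, not part of the original module) ---
# Instantiating User starts the whole simulation: __init__ prints the banner
# and calls _start(), which shows the register/log-in menu.
if __name__ == "__main__":
    User()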
| Alerts4 | /Alerts4-0.0.5.tar.gz/Alerts4-0.0.5/forwardAlert4/alerts.py | alerts.py |
import requests
import json
class Client:
def __init__(self, key):
self.key = key
self.base = "https://api.aletheiaapi.com/"
def StockData(self, symbol, summary = False, statistics = False):
url = self.base + f"StockData?key={self.key}&symbol={symbol}"
if summary: url = url + "&summary=true"
if statistics: url = url + "&statistics=true"
return json.loads(requests.get(url).text)
def Crypto(self, symbol):
url = self.base + f"Crypto?key={self.key}&symbol={symbol}"
return json.loads(requests.get(url).text)
def SearchEntities(self, term, top = 0):
url = self.base + f"SearchEntities?key={self.key}&term={term}"
if top > 0: url = url + f"&top={top}"
return json.loads(requests.get(url).text)
def GetEntity(self, id):
url = self.base + f"GetEntity?key={self.key}&id={id}"
return json.loads(requests.get(url).text)
def GetFiling(self, id = "", url = ""):
call = self.base + f"GetFiling?key={self.key}" # Here we use "call" since "url" is already a parameter.
if len(id) == 0 and len(url) == 0:
print("Please specify either the id or url of the filing.")
elif len(id) > 0: call = call + f"&id={id}"
else: call = call + f"&url={url}"
return json.loads(requests.get(call).text)
def LatestTransactions(self, issuer = "", owner = "", top = 20, before = None, securitytype = -1, transactiontype = -1, cascade = False):
url = self.base + f"LatestTransactions?key={self.key}&top={top}"
if len(issuer) > 0: url = url + f"&issuer={issuer}"
if len(owner) > 0: url = url + f"&owner={owner}"
if before != None: url = url + f"&before={before}"
if securitytype >= 0: url = url + f"&securitytype={securitytype}"
if transactiontype >= 0: url = url + f"&transactiontype={transactiontype}"
if cascade: url = url + "&cascade"
return json.loads(requests.get(url).text)
def AffiliatedOwners(self, id):
url = self.base + f"AffiliatedOwners?key={self.key}&id={id}"
return json.loads(requests.get(url).text)
def CommonFinancials(self, id, period = None, before = None):
url = self.base + f"CommonFinancials?key={self.key}&id={id}"
if period != None: url = url + f"&period={period}"
if before != None: url = url + f"&before={before}"
return json.loads(requests.get(url).text)
def FinancialFactTrend(self, id, label, period = None, after = None, before = None):
url = self.base + f"FinancialFactTrend?key={self.key}&id={id}&label={label}"
if period != None: url = url + f"&period={period}"
if after != None: url = url + f"&after={after}"
if before != None: url = url + f"&before={before}"
return json.loads(requests.get(url).text)
def SearchEarningsCalls(self, company = None, year = 0, quarter = None, top = 15):
url = self.base + f"SearchEarningsCalls?key={self.key}&top={top}"
if company != None: url = url + f"&company={company}"
if year > 0: url = url + f"&year={year}"
if quarter != None: url = url + f"&quarter={quarter}"
return json.loads(requests.get(url).text)
def EarningsCall(self, company, year, quarter, begin = None, end = None):
url = self.base + f"EarningsCall?key={self.key}&company={company}&year={year}&quarter={quarter}"
if begin != None: url = url + f"&begin={begin}"
if end != None: url = url + f"&end={end}"
return json.loads(requests.get(url).text)
def EarningsCallHighlights(self, company, year, quarter, category = None):
url = self.base + f"EarningsCallHighlights?key={self.key}&company={company}&year={year}&quarter={quarter}"
if category != None: url = url + f"&category={category}"
return json.loads(requests.get(url).text)
def EntityFilings(self, id, filing = None, before = None):
url = self.base + f"EntityFilings?key={self.key}&id:={id}"
if filing != None: url = url + f"&filing={filing}"
if before != None: url = url + f"&before={before}"
return json.loads(requests.get(url).text)
def OpenForm4(self, filingurl):
url = self.base + f"OpenForm4?key={self.key}&filingurl={filingurl}"
return json.loads(requests.get(url).text)
def OpenCommonFinancials(self, filingurl):
url = self.base + f"OpenCommonFinancials?key={self.key}&filingurl={filingurl}"
return json.loads(requests.get(url).text)
    def consumption(self, begin = None, end = None, year = None, month = None): # Doesn't work yet
        url = self.base + f"consumption?key={self.key}"
        if begin != None: url = url + f"&begin={begin}"
        if end != None: url = url + f"&end={end}"
        if year != None: url = url + f"&year={year}"
        if month != None: url = url + f"&month={month}"
        return requests.get(url).text
def mycalls(self, last = None): # Doesn't work yet
url = self.base + f"mycalls?key={self.key}"
if last != None: url = url + f"&last={last}"
return json.loads(requests.get(url).text)
def version(self):
return requests.get(self.base + "version").text
def CountSecEntities(self, onlyco = False):
url = self.base + "CountSecEntities"
if onlyco: url = url + "?onlyco=true"
return json.loads(requests.get(url).text)
def CountSecFilings(self):
return requests.get(self.base + "CountSecFilings").text
def CountTransactions(self):
return requests.get(self.base + "CountTransactions").text
def CountFactContexts(self):
return requests.get(self.base + "CountFactContexts").text
def CountFinancialFacts(self, id = ""):
url = self.base + "CountFinancialFacts"
if len(id) > 0: url = url + f"?id={id}"
return requests.get(url).text
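# --- Illustrative usage (a sketch; "YOUR_API_KEY" is a placeholder) ---
# Every method builds a query URL against https://api.aletheiaapi.com/ and
# returns the decoded JSON, or raw text for version and the Count* endpoints.
if __name__ == "__main__":
    client = Client("YOUR_API_KEY")
    print(client.version())                        # API version, plain text
    print(client.StockData("AAPL", summary=True))  # quote plus summary data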
| AletheiaPy | /AletheiaPy-0.0.4-py3-none-any.whl/Aletheia.py | Aletheia.py |
import subprocess
import sys
from coalib.bearlib.abstractions.Linter import linter
from dependency_management.requirements.NpmRequirement import NpmRequirement
@linter(executable='alex',
output_format='regex',
output_regex=r'(?P<line>\d+):(?P<column>\d+)-(?P<end_line>\d+):'
r'(?P<end_column>\d+)\s+(?P<severity>warning)\s+'
r'(?P<message>.+)')
class AlexBear:
"""
Checks the markdown file with Alex - Catch insensitive, inconsiderate
writing.
Be aware that Alex and this bear only work on English text.
For more information, consult <https://www.npmjs.com/package/alex>.
"""
LANGUAGES = {'Natural Language'}
REQUIREMENTS = {NpmRequirement('alex', '3')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'[email protected]'}
LICENSE = 'AGPL-3.0'
@classmethod
def check_prerequisites(cls):
parent_prereqs = super().check_prerequisites()
if parent_prereqs is not True: # pragma: no cover
return parent_prereqs
incorrect_pkg_msg = (
'Please ensure that the package that has been installed is the '
"one to 'Catch insensitive, inconsiderate writing'. This can be "
'verified by running `alex --help` and seeing what it does.')
try:
output = subprocess.check_output(('alex', '--help'),
stderr=subprocess.STDOUT)
except (OSError, subprocess.CalledProcessError):
return ('The `alex` package could not be verified. ' +
incorrect_pkg_msg)
else:
output = output.decode(sys.getfilesystemencoding())
if 'Catch insensitive, inconsiderate writing' in output:
return True
else:
return ("The `alex` package that's been installed seems to "
'be incorrect. ' + incorrect_pkg_msg)
@staticmethod
def create_arguments(filename, file, config_file):
return filename,
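# --- Illustrative .coafile section (a sketch; the section name and file glob
# are placeholders) ---
# With the npm package installed (e.g. `npm install -g alex`), the bear can be
# enabled like any other coala bear:
#
#     [check-prose]
#     bears = AlexBear
#     files = **.md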
| AlexBear | /AlexBear-0.10.0.tar.gz/AlexBear-0.10.0/coalaAlexBear/AlexBear.py | AlexBear.py |
import requests, os, json, hashlib
class AlexandriaException(Exception):
pass
class AlexandriaUploader():
def __init__(self, api_key, archive_url):
self.api_key = api_key
self.headers = {"Authorization": "Token " + api_key,
"content-type": "application/json"}
self.archive_url = archive_url
def make_request(self, uri, verb, content):
url = os.path.join(self.archive_url, uri)
if verb == "GET":
return requests.get(url, data=json.dumps(content), headers=self.headers)
elif verb == "POST":
return requests.post(url, data=json.dumps(content), headers=self.headers)
elif verb == "PUT":
return requests.put(url, data=json.dumps(content), headers=self.headers)
elif verb == "PATCH":
return requests.patch(url, data=json.dumps(content), headers=self.headers)
elif verb == "DELETE":
return requests.delete(url, data=json.dumps(content), headers=self.headers)
"""
Takes a build name, a dictionary of metadata, and a list of tags, and creates a
build on the archive.
Returns the id of the created build, or throws an exception.
"""
    def create_build(self, name, metadata, tags=None):
        build_bundle = {"name": name, "metadata": metadata, "tags": tags or []}
r = self.make_request("api/build/", "POST", build_bundle)
        try:
            data = r.json()
        except ValueError:
            raise AlexandriaException("Archive did not return valid json: " +
                                      r.text)
        if "pk" in data:
            return data["pk"]
        elif "id" in data:
            return data["id"]
        raise AlexandriaException("Unexpected data returned while posting "
                                  "build: " + r.text)
"""
Takes a build id, an artifact category (in slug form), and the local filename of
an artifact to be uploaded. Calculates the size of the artifact, and an md5
checksum, and notifies the archive of a new artifact.
Returns the temporary PUTable upload URL returned by the archive.
"""
    def notify_new_artifact(self, build_id, artifact_category, filename):
        try:
            file_size = os.path.getsize(filename)
            with open(filename, 'rb') as artifact:
                md5 = hashlib.md5(artifact.read()).hexdigest()
        except OSError:
            raise AlexandriaException("Error calculating file metadata. Did you "
                                      "provide a proper filename?")
artifact_bundle = {"category": artifact_category, "build": build_id,
"size": file_size, "checksum": md5}
r = self.make_request("api/artifact/", "PUT", artifact_bundle)
        try:
            data = r.json()
        except ValueError:
            raise AlexandriaException("Archive did not return valid json: " +
                                      r.text)
        if "url" in data:
            return data['url']
        else:
            raise AlexandriaException("Archive did not return upload url! "
                                      "Response: " + r.text)
"""
Takes a filename and an upload URL and uploads the file.
"""
def do_upload(self, filename, upload_url):
curl_cmd = "curl --request PUT --upload-file %s '%s'" % (filename, upload_url)
out = os.system(curl_cmd)
if out != 0:
raise AlexandriaException("Uploading returned non-zero retval!")
"""
Takes a build id and an artifact category (in slug form) and notifies the
archive that the appropriate upload is complete.
"""
def verify_new_artifact(self, build_id, artifact_category):
patch_bundle = {"category": artifact_category, "build": build_id}
r = self.make_request("api/artifact/", "PATCH", patch_bundle)
        try:
            data = r.json()
        except ValueError:
            raise AlexandriaException("Archive did not return valid json: " +
                                      r.text)
        if "error" in data:
            raise AlexandriaException("Archive returned error: " + r.text)
"""
The whole shebang
"""
def create_build_and_upload_artifacts(name, metadata, tags, artifacts,
                                      api_key, archive_url):
    uploader = AlexandriaUploader(api_key, archive_url)
    build_id = uploader.create_build(name, metadata, tags)
    for category, filename in artifacts.items():
        url = uploader.notify_new_artifact(build_id, category, filename)
        uploader.do_upload(filename, url)
    for category, filename in artifacts.items():
        uploader.verify_new_artifact(build_id, category)
def create_and_upload_from_build_manifest(manifest_name, api_key, archive_url):
    with open(manifest_name) as manifest:
        manifest_data = json.load(manifest)
    create_build_and_upload_artifacts(
        manifest_data["name"],
        manifest_data["metadata"],
        manifest_data.get("tags", []),
        manifest_data["artifacts"],
        api_key,
        archive_url
    )
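# --- Illustrative build manifest (a sketch; names and paths are placeholders) ---
# create_and_upload_from_build_manifest() expects a JSON file shaped like:
#
#     {
#         "name": "nightly-42",
#         "metadata": {"commit": "abc123"},
#         "tags": ["nightly"],
#         "artifacts": {"firmware": "build/firmware.bin"}
#     }
#
# where each artifacts key is an artifact-category slug and each value is the
# local filename to upload:
#
#     create_and_upload_from_build_manifest(
#         "manifest.json", "API_KEY", "https://archive.example.com")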
| Alexandria-Upload-Utils | /Alexandria-Upload-Utils-0.3.0.tar.gz/Alexandria-Upload-Utils-0.3.0/alexandria_upload.py | alexandria_upload.py |
*terra* package
---------------
*terra* is a tool for reproducing the abundance pattern of stars. *terra* can be used to model planet engulfment events. For more details, see our papers `Yana Galarza et al. 2016 <https://ui.adsabs.harvard.edu/abs/2016A%26A...589A..65G/abstract>`_ and `Yana Galarza et al. 2021 <https://ui.adsabs.harvard.edu/abs/2021arXiv210900679G>`_.
Installation
------------
The only way to install *terra* is through pip::

    pip install terra-2.0

If you already have *terra* installed, you should consider upgrading to the latest version via::

    pip install terra-2.0 --upgrade
Dependencies
------------
The main dependencies of *terra* are `pandas <https://pandas.pydata.org/>`_, `NumPy <https://numpy.org/>`_, `Astropy <https://www.astropy.org/>`_, `matplotlib <https://matplotlib.org/>`_, and `tqdm <https://tqdm.github.io/>`_; the `os <https://docs.python.org/3/library/os.html>`_ module it also uses is part of the Python standard library.
These are installed using pip::

    pip install pandas numpy astropy matplotlib tqdm
Example usage
-------------
.. code-block:: python

    import terra

    # Computing the convective mass of a star with [Fe/H] = 0.1 dex
    # and mass = 1 solar mass.
    # The mass can take values from 0.5 <= M <= 1.3 (solar mass)
    # The [Fe/H] can take values from -1.0 <= [Fe/H] <= 0.3 (dex)
    # By default the code computes the convective mass using the Yale isochrones of stellar evolution
    terra.cvmass(feh=0.1, mass=1)

    # Computing the abundance pattern of a star with [Fe/H] = 0.164 dex and mass = 1 M_sun.
    # obs_abd.csv is a table containing the observed abundances.
    terra.pacha(feh=0.164, mass=1, data_input='obs_abd.csv')

    # If you want to save the outputs (figures and tables) with the star name (e.g., HIP 71726):
    terra.pacha(feh=0.164, mass=1, Mcon=0.01, data_input='obs_abd.csv', data_output='HIP71726')

    # For more details please see terra's tutorial in the terra_example file
Contributing
------------
*terra* is a tool that needs input to improve. Please contact me ([email protected]) if you have questions. Users are welcome to propose new features or report bugs by opening an issue on GitHub. A special thanks to my friend Marilia Carlos, who created the 'yale.txt' table to estimate convective masses.
Author
------
- `Jhon Yana Galarza <https://github.com/ramstojh>`_
Maintainers
-----------
- `Kayleigh Meneghini <https://github.com/kaykeigh>`_
Preferred citation
------------------
Please cite `Yana Galarza et al. 2016, A&A, 589, A65 <https://ui.adsabs.harvard.edu/abs/2016A%26A...589A..65G/abstract>`_ if you use this code in your
research. The BibTeX entry for the paper is:
.. code:: bibtex
@ARTICLE{Yana2016,
author = {{Galarza}, Jhon Yana and {Mel{\'e}ndez}, Jorge and {Cohen}, Judith G.},
title = "{Serendipitous discovery of the faint solar twin Inti 1}",
journal = {\aap},
keywords = {Sun: abundances, stars: abundances, stars: fundamental parameters, Earth, stars: solar-type, planetary systems, Astrophysics - Solar and Stellar Astrophysics},
year = 2016,
month = may,
volume = {589},
eid = {A65},
pages = {A65},
doi = {10.1051/0004-6361/201527477},
archivePrefix = {arXiv},
eprint = {1603.01245},
primaryClass = {astro-ph.SR},
adsurl = {https://ui.adsabs.harvard.edu/abs/2016A&A...589A..65G},
adsnote = {Provided by the SAO/NASA Astrophysics Data System}
}
License & attribution
---------------------
Copyright 2021, Jhon Yana Galarza.
The source code is made available under the terms of the MIT license.
If you make use of this code, please cite this package and its dependencies.
| Alexandria11 | /Alexandria11-0.2.tar.gz/Alexandria11-0.2/README.rst | README.rst |
import glob
import re
import os
import sys
import fnmatch
import click
from pkg_resources import require
EXTES = ('.cpp','.cs','.c','.h','.css','.cjsx','.coffee','.ejs','.erl','.go',
'.html','.htm','.hbs','.handlebars','.hs','.hng','.hogan','.jade',
'.js','.es','.es6','.jsx','.less','.mustache','.php','.pl','.pm',
'.py','.rb','.sass','.scss','.sh','.zsh','.bash','.styl','.twig','.ts',)
COUNTER = 0
F_COUNTER = 0
SEARCHED = 0
def filter_files(files, lang=EXTES):
""" Filters files according to options """
lang_specific = []
for f in files:
if f.endswith(lang):
lang_specific.append(f)
return lang_specific
def get_gitignore(path):
""" Searches for gitignore file in current and parent directories """
if '.gitignore' in os.listdir(path):
return parse_gitignore(os.path.join(path, '.gitignore'))
else:
full_path = os.path.abspath(path)
if full_path == '/':
return
return get_gitignore(os.path.dirname(full_path))
def parse_gitignore(gipath):
    """ Returns a list with gitignore's content """
    gilist = []
    with open(os.path.abspath(gipath), 'r') as gitignore_file:
        for row in gitignore_file:
            row = row.strip()  # also handles a last line without a trailing newline
            if row and not row.startswith('#'):
                gilist.append(row.rstrip('/'))
    return gilist
def get_files(path):
""" Finds all files topdown the path excluding gitignore material """
# In case path is singular file:
if os.path.isfile(path):
return [path]
all_files = []
# Look for gitignore upstream
gilist = get_gitignore(path)
# In case path is directory:
# In case no gitignore was found in current directory or up
if not gilist:
for root, dirs, files in os.walk(path):
dirs[:] = [d for d in dirs if d[0] != '.']
# Constantly check for gitignore while walking
if '.gitignore' in os.listdir(root):
all_files.extend(get_files(root))
dirs[:] = []
files[:] = []
for name in files:
if not name.startswith('.'):
all_files.append(os.path.join(root, name))
# In case gitignore was found
if gilist:
for root, dirs, files in os.walk(path):
dirs[:] = [d for d in dirs if d[0] != '.' and d not in gilist]
# If root dir is in gitignore break and go to next directory
for item in gilist:
if fnmatch.fnmatch(root, item):
dirs[:] = []
break
else:
# If file is gitignore material break and go to next file
for name in files:
for item in gilist:
if fnmatch.fnmatch(name, item) or item.endswith(name):
break
else:
# Finally append the file if it passed all tests
if not name.startswith('.') and name.endswith(EXTES):
all_files.append(os.path.join(root, name))
return all_files
def pretty_print(linenum, todo):
""" Prints a table with all the found todos """
global COUNTER
comm_endings = ['"""', "'''", '*/', '-->', '#}', '--}}', '}}', '%>']
for i in comm_endings:
if todo.endswith(i):
todo = todo[:-len(i)]
print(' line', linenum.rjust(4), '>>\t', todo )
COUNTER += 1
def search_todo(filtered_files):
""" Extracts a todo from file, feeds todos in printing function """
global F_COUNTER
global SEARCHED
    todo = re.compile(r'\bTODO\b.*')
    fixme = re.compile(r'\bFIXME\b.*')
for files in filtered_files:
f = open(os.path.abspath(files), 'r')
printed = False
SEARCHED += 1
for n, row in enumerate(f.readlines()):
found_todo = todo.search(row)
found_fixme = fixme.search(row)
if found_todo or found_fixme:
if not printed:
print('')
click.secho(files, fg='blue', bold=True)
printed = True
F_COUNTER += 1
if found_todo:
pretty_print(str(n+1), found_todo.group())
else:
pretty_print(str(n+1), found_fixme.group())
f.close()
def report():
""" Prints a report at the end of the search """
global COUNTER
print('\n\n')
print('Searched {0} files'.format(SEARCHED))
print('Found {0} TODOs in {1} files'.format(COUNTER, F_COUNTER))
@click.group(invoke_without_command=True)
@click.option('-v', '--version', is_flag=True, help='Return the current version.')
@click.option('-o', '--only', help='Specify language extension to search. Extension form: .extension')
@click.option('-x', '--exclude', help='Specify extension to exclude from search.')
@click.argument('path', required=False)
def cli(version, path, only, exclude):
""" Extract TODO comment tags from source files """
if not path and not version:
click.echo('Missing argument PATH')
if path:
files = get_files(path)
if only:
filtered_files = filter_files(files, only)
elif exclude:
filtered_files = filter_files(files, tuple(x for x in EXTES if x != exclude))
else:
filtered_files = files
search_todo(filtered_files)
report()
if version:
print(require('alfe')[0].version)
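# --- Illustrative invocations (a sketch; the paths are placeholders and the
# console script is assumed to be installed as "alfe") ---
#
#     alfe ~/projects/myapp            # search a whole tree for TODO/FIXME tags
#     alfe -o .py ~/projects/myapp     # restrict the search to Python files
#     alfe -x .html ~/projects/myapp   # search everything except HTML files
#     alfe --version                   # print the installed version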
| Alfe | /Alfe-0.1.0.tar.gz/Alfe-0.1.0/index.py | index.py |
<div align="center">
<img src="./icon.png" alt="Alfred-PyWorkflow logo" height="200">
</div>
Alfred-PyWorkflow
===============
A helper library in Python for authors of workflows for [Alfred 4 and 5][alfred].
[![Build Status][shield-github]][action-github]
[![Coverage Status][shield-coveralls]][coveralls]
[![Development Status][shield-status]][pypi]
[![Latest Version][shield-version]][pypi]
[![Supported Python Versions][shield-pyversions]][pypi]
[![Downloads][shield-download]][pypi]
Supports Alfred 4 and Alfred 5 on macOS Catalina or later with Python 3.7+.
Alfred-PyWorkflow is a Python 3 port of the original [Alfred-Workflow][alfred-workflow].
Alfred-PyWorkflow takes the grunt work out of writing a workflow by giving you the tools to create
a fast and featureful Alfred workflow from an API, application or library in minutes.
Always supports all current Alfred features.
Features
--------
- Auto-saved settings API for your workflow
- Super-simple data caching with expiry
- Fuzzy, Alfred-like search/filtering with diacritic folding
- Keychain support for secure storage of passwords, API keys etc.
- Lightweight web API with [Requests][requests]-like interface
- Background tasks to keep your workflow responsive
- Simple generation of Alfred JSON feedback
- Full support of Alfred's AppleScript/JXA API
- Catches and logs workflow errors for easier development and support
- "Magic" arguments to help development/debugging
- Pre-configured logging
- Automatically check for workflow updates via GitHub releases
- Post notifications via Notification Center
- Advanced modifiers
- Set [workflow variables][workflow-variables] from code
- Re-running Script Filters
Contents
--------
<!-- MarkdownTOC autolink="true" bracket="round" depth="3" autoanchor="true" -->
- [Installation](#installation)
- [With pip](#with-pip)
- [From source](#from-source)
- [Usage](#usage)
- [Workflow script skeleton](#workflow-script-skeleton)
- [Examples](#examples)
- [Web](#web)
- [Keychain access](#keychain-access)
- [Documentation](#documentation)
- [Dash docset](#dash-docset)
- [Licensing, thanks](#licensing-thanks)
- [Contributing](#contributing)
- [Adding a workflow to the list](#adding-a-workflow-to-the-list)
- [Bug reports, pull requests](#bug-reports-pull-requests)
- [Contributors](#contributors)
- [Workflows using Alfred-PyWorkflow](#workflows-using-alfred-pyworkflow)
<!-- /MarkdownTOC -->
<a name="installation"></a>
Installation
------------
**Note**: If you're new to Alfred workflows, check out
[the tutorial][docs-tutorial] in the docs.
<a name="with-pip"></a>
### With pip ###
You can install Alfred-PyWorkflow directly into your workflow with:
```zsh
# from your workflow directory
pip install --target=. Alfred-PyWorkflow
```
You can install any other library available on the [Cheese Shop][cheeseshop] the same way. See the
[pip documentation][pip-docs] for more information.
It is highly advisable to bundle all your workflow's dependencies with your workflow in this way.
That way, it will "just work".
<a name="from-source"></a>
### From source ###
1. Download the `alfred-pyworkflow-X.X.X.zip` from the [GitHub releases page][releases].
2. Extract the ZIP archive and place the `workflow` directory in the root folder of your workflow
(where `info.plist` is).
Your workflow directory should look something like this (where `yourscript.py` contains your
workflow code and `info.plist` is the workflow information file generated by Alfred):
Your Workflow/
info.plist
icon.png
workflow/
__init__.py
background.py
notify.py
update.py
version
web.py
workflow.py
yourscript.py
etc.
Alternatively, you can clone/download the Alfred-PyWorkflow [GitHub repository][repo] and copy the
`workflow` subdirectory to your workflow's root directory.
<a name="usage"></a>
Usage
-----
A few examples of how to use Alfred-PyWorkflow.
<a name="workflow-script-skeleton"></a>
### Workflow script skeleton ###
Set up your workflow scripts as follows (if you wish to use the built-in error handling or `sys.path` modification):
```python
#!/usr/bin/env python3
# encoding: utf-8
import sys
from workflow import Workflow
def main(wf):
# The Workflow instance will be passed to the function
# you call from `Workflow.run`.
# Not super useful, as the `wf` object created in
# the `if __name__ ...` clause below is global...
#
# Your imports go here if you want to catch import errors, which
# is not a bad idea, or if the modules/packages are in a directory
# added via `Workflow(libraries=...)`
import somemodule
import anothermodule
# Get args from Workflow, already as normalized string.
# This is also necessary for "magic" arguments to work.
args = wf.args
# Do stuff here ...
# Add an item to Alfred feedback
wf.add_item('Item title', 'Item subtitle')
# Send output to Alfred. You can only call this once.
# Well, you *can* call it multiple times, but subsequent calls
# are ignored (otherwise the JSON sent to Alfred would be invalid).
wf.send_feedback()
if __name__ == '__main__':
# Create a global `Workflow` object
wf = Workflow()
# Call your entry function via `Workflow.run()` to enable its
# helper functions, like exception catching, ARGV normalization,
# magic arguments etc.
sys.exit(wf.run(main))
```
<a name="examples"></a>
### Examples ###
Cache data for 30 seconds:
```python
def get_web_data():
return web.get('http://www.example.com').json()
def main(wf):
# Save data from `get_web_data` for 30 seconds under
# the key ``example``
data = wf.cached_data('example', get_web_data, max_age=30)
for datum in data:
wf.add_item(datum['title'], datum['author'])
wf.send_feedback()
```
<a name="web"></a>
#### Web ####
Grab data from a JSON web API:
```python
data = web.get('http://www.example.com/api/1/stuff').json()
```
Post a form:
```python
r = web.post('http://www.example.com/',
data={'artist': 'Tom Jones', 'song': "It's not unusual"})
```
Upload a file:
```python
files = {'fieldname' : {'filename': "It's not unusual.mp3",
'content': open("It's not unusual.mp3", 'rb').read()}
}
r = web.post('http://www.example.com/upload/', files=files)
```
<a name="keychain-access"></a>
#### Keychain access ####
Save password:
```python
wf = Workflow()
wf.save_password('name of account', 'password1lolz')
```
Retrieve password:
```python
wf = Workflow()
wf.get_password('name of account')
```
<a name="documentation"></a>
Documentation
-------------
The full documentation, including API docs and a tutorial, can be found at [xdevcloud.de][docs].
<a name="dash-docset"></a>
### Dash docset ###
The documentation is also available as a [Dash docset][dash].
<a name="licensing-thanks"></a>
Licensing, thanks
-----------------
The code and the documentation are released under the MIT and [Creative Commons Attribution-NonCommercial][cc] licences respectively. See [LICENCE.txt](LICENCE.txt) for details.
The documentation was generated using [Sphinx][sphinx] and the [Alabaster][alabaster] theme by [bitprophet][bitprophet].
Many of the cooler ideas in Alfred-PyWorkflow were inspired by [Alfred2-Ruby-Template][ruby-template] by Zhaocai.
The Keychain parser was based on [Python-Keyring][python-keyring] by Jason R. Coombs.
<a name="contributing"></a>
Contributing
------------
<a name="adding-a-workflow-to-the-list"></a>
### Adding a workflow to the list ###
If you want to add a workflow to the [list of workflows using Alfred-PyWorkflow][docs-workflows], **don't add it to the docs!** The list is machine-generated from the [`library_workflows.tsv`](extras/library_workflows.tsv) file. Please add it to [`library_workflows.tsv`](extras/library_workflows.tsv), and submit a corresponding pull request.
The list is not auto-updated, so if you've released a workflow and are keen to see it in this list, please [open an issue][issues] asking me to update the list.
<a name="bug-reports-pull-requests"></a>
### Bug reports, pull requests ###
Please see [the documentation][docs-contributing].
<a name="contributors"></a>
### Contributors ###
- [Thomas Harr][harrtho]
- [Dean Jackson][deanishe]
- [Stephen Margheim][smargh]
- [Fabio Niephaus][fniephaus]
- [Owen Min][owenwater]
<a name="workflows-using-alfred-pyworkflow"></a>
Workflows using Alfred-PyWorkflow
-------------------------------
[Here is a list][docs-workflows] of some of the many workflows based on Alfred-PyWorkflow.
[alfred]: http://www.alfredapp.com/
[alabaster]: https://github.com/bitprophet/alabaster
[alfred-workflow]: https://github.com/deanishe/alfred-workflow
[bitprophet]: https://github.com/bitprophet
[cc]: https://creativecommons.org/licenses/by-nc/4.0/legalcode
[coveralls]: https://coveralls.io/r/harrtho/alfred-pyworkflow?branch=main
[deanishe]: https://github.com/deanishe
[docs-contributing]: https://xdevcloud.de/alfred-pyworkflow/contributing.html
[docs-tutorial]: https://xdevcloud.de/alfred-pyworkflow/tutorial.html
[docs]: https://xdevcloud.de/alfred-pyworkflow/
[docs-workflows]: https://xdevcloud.de/alfred-pyworkflow/aw-workflows.html
[dash]: https://github.com/harrtho/alfred-pyworkflow/raw/main/docs/Alfred-PyWorkflow.docset.zip
[fniephaus]: https://github.com/fniephaus
[harrtho]: https://github.com/harrtho
[issues]: https://github.com/harrtho/alfred-pyworkflow/issues
[owenwater]: https://github.com/owenwater
[pypi]: https://pypi.python.org/pypi/Alfred-PyWorkflow/
[releases]: https://github.com/harrtho/alfred-pyworkflow/releases
[repo]: https://github.com/harrtho/alfred-pyworkflow
[requests]: http://docs.python-requests.org/en/latest/
[shield-coveralls]: https://coveralls.io/repos/github/harrtho/alfred-pyworkflow/badge.svg?branch=main
[shield-download]: https://img.shields.io/pypi/dm/Alfred-PyWorkflow.svg?style=flat
[shield-github]: https://github.com/harrtho/alfred-pyworkflow/workflows/CI/badge.svg
[action-github]: https://github.com/harrtho/alfred-pyworkflow/actions?query=workflow%3ACI
[shield-status]: https://img.shields.io/pypi/status/Alfred-PyWorkflow.svg?style=flat
[shield-version]: https://img.shields.io/pypi/v/Alfred-PyWorkflow.svg?style=flat
[shield-pyversions]: https://img.shields.io/pypi/pyversions/Alfred-PyWorkflow.svg?style=flat
[smargh]: https://github.com/smargh
[sphinx]: http://sphinx-doc.org/
[cheeseshop]: https://pypi.org
[pip-docs]: https://pip.pypa.io/en/latest/
[ruby-template]: http://zhaocai.github.io/alfred2-ruby-template/
[python-keyring]: https://pypi.python.org/pypi/keyring
[workflow-variables]: https://xdevcloud.de/alfred-pyworkflow/guide/variables.html#workflow-variables
| Alfred-PyWorkflow | /Alfred-PyWorkflow-2.0.0b0.tar.gz/Alfred-PyWorkflow-2.0.0b0/README.md | README.md |
A helper library in Python for authors of workflows for `Alfred 4 and 5`_.
Supports Alfred 4 and Alfred 5 on macOS with Python 3.7+.
`Alfred-PyWorkflow`_ is a Python 3 port of the original `Alfred-Workflow`_.
Alfred-PyWorkflow takes the grunt work out of writing a workflow by giving you the tools to create
a fast and featureful Alfred workflow from an API, application or library in minutes.
Always supports all current Alfred features.
http://www.xdevcloud.de/alfred-pyworkflow/
Features
========
* Auto-saves settings
* Super-simple data caching with expiry
* Fuzzy, Alfred-like search/filtering with diacritic folding
* Keychain support for secure storage of passwords, API keys etc.
* Lightweight web API with `requests`_-like interface
* Background tasks to keep your workflow responsive
* Simple generation of Alfred JSON feedback
* Full support of Alfred's AppleScript/JXA API
* Catches and logs workflow errors for easier development and support
* "Magic" arguments to help development/debugging
* Pre-configured logging
* Automatically check for workflow updates via GitHub releases
* Post notifications via Notification Center
* Advanced modifiers
* Set `workflow variables`_ from code
* Re-running Script Filters
Installation
============
**Note**: If you're new to Alfred workflows, check out
`the tutorial`_ in the docs.
With pip
--------
You can install Alfred-PyWorkflow directly into your workflow with::
# from your workflow directory
pip install --target=. Alfred-PyWorkflow
You can install any other library available on the `Cheese Shop`_ the same way. See the
`pip documentation`_ for more information.
It is highly advisable to bundle all your workflow's dependencies with your workflow in this way.
That way, it will "just work".
From source
-----------
1. Download the ``alfred-pyworkflow-X.X.X.zip`` file from the `GitHub releases page`_.
2. Extract the ZIP archive and place the `workflow` directory in the root folder of your workflow
(where ``info.plist`` is).
Your workflow directory should look something like this (where ``yourscript.py`` contains your
workflow code and ``info.plist`` is the workflow information file generated by Alfred)::
Your Workflow/
info.plist
icon.png
workflow/
__init__.py
background.py
notify.py
update.py
version
web.py
workflow.py
yourscript.py
etc.
Alternatively, you can clone/download the Alfred-PyWorkflow `GitHub repository`_ and copy the
``workflow`` subfolder to your workflow's root directory.
Usage
=============
A few examples of how to use Alfred-PyWorkflow.
Workflow script skeleton
------------------------
.. code-block:: python
#!/usr/bin/env python3
# encoding: utf-8
import sys
from workflow import Workflow
def main(wf):
# The Workflow instance will be passed to the function
# you call from `Workflow.run`.
# Not super useful, as the `wf` object created in
# the `if __name__ ...` clause below is global...
#
# Your imports go here if you want to catch import errors, which
# is not a bad idea, or if the modules/packages are in a directory
# added via `Workflow(libraries=...)`
import somemodule
import anothermodule
# Get args from Workflow, already as normalized string.
# This is also necessary for "magic" arguments to work.
args = wf.args
# Do stuff here ...
# Add an item to Alfred feedback
wf.add_item('Item title', 'Item subtitle')
# Send output to Alfred. You can only call this once.
# Well, you *can* call it multiple times, but subsequent calls
# are ignored (otherwise the JSON sent to Alfred would be invalid).
wf.send_feedback()
if __name__ == '__main__':
# Create a global `Workflow` object
wf = Workflow()
# Call your entry function via `Workflow.run()` to enable its
# helper functions, like exception catching, ARGV normalization,
# magic arguments etc.
sys.exit(wf.run(main))
Examples
--------
Cache data for 30 seconds:
.. code-block:: python
def get_web_data():
return web.get('http://www.example.com').json()
def main(wf):
# Save data from `get_web_data` for 30 seconds under
# the key ``example``
data = wf.cached_data('example', get_web_data, max_age=30)
for datum in data:
wf.add_item(datum['title'], datum['author'])
wf.send_feedback()
Web
---
Grab data from a JSON web API:
.. code-block:: python
data = web.get('http://www.example.com/api/1/stuff').json()
Post a form:
.. code-block:: python
r = web.post('http://www.example.com/',
data={'artist': 'Tom Jones', 'song': "It's not unusual"})
Upload a file:
.. code-block:: python
files = {'fieldname' : {'filename': "It's not unusual.mp3",
'content': open("It's not unusual.mp3", 'rb').read()}
}
r = web.post('http://www.example.com/upload/', files=files)
Keychain access
---------------
Save password:
.. code-block:: python
wf = Workflow()
wf.save_password('name of account', 'password1lolz')
Retrieve password:
.. code-block:: python
wf = Workflow()
wf.get_password('name of account')
Documentation
=============
The full documentation, including API docs and a tutorial, can be found at `xdevcloud.de/alfred-pyworkflow`_.
.. _requests: http://docs.python-requests.org/en/latest/
.. _Alfred-PyWorkflow: https://github.com/harrtho/alfred-pyworkflow
.. _Alfred-Workflow: https://github.com/deanishe/alfred-workflow
.. _Alfred 4 and 5: http://www.alfredapp.com/
.. _GitHub releases page: https://github.com/harrtho/alfred-pyworkflow/releases
.. _the tutorial: https://xdevcloud.de/alfred-pyworkflow/tutorial.html
.. _GitHub repository: https://github.com/harrtho/alfred-pyworkflow
.. _Cheese Shop: https://pypi.org
.. _pip documentation: https://pip.pypa.io/en/latest/
.. _workflow variables: https://xdevcloud.de/alfred-pyworkflow/guide/variables.html#workflow-variables
.. _xdevcloud.de/alfred-pyworkflow: https://xdevcloud.de/alfred-pyworkflow/
| Alfred-PyWorkflow | /Alfred-PyWorkflow-2.0.0b0.tar.gz/Alfred-PyWorkflow-2.0.0b0/README_PYPI.rst | README_PYPI.rst |
import atexit
import errno
import fcntl
import functools
import json
import os
import signal
import subprocess
import sys
import time
from collections import namedtuple
from contextlib import contextmanager
from threading import Event
# JXA scripts to call Alfred's API via the Scripting Bridge
# {app} is automatically replaced with the bundle ID returned by
# jxa_app_name() ("com.runningwithcrayons.Alfred" for Alfred 4+).
#
# Open Alfred in search (regular) mode
JXA_SEARCH = 'Application({app}).search({arg});'
# Open Alfred's File Actions on an argument
JXA_ACTION = 'Application({app}).action({arg});'
# Open Alfred's navigation mode at path
JXA_BROWSE = 'Application({app}).browse({arg});'
# Set the specified theme
JXA_SET_THEME = 'Application({app}).setTheme({arg});'
# Call an External Trigger
JXA_TRIGGER = 'Application({app}).runTrigger({arg}, {opts});'
# Save a variable to the workflow configuration sheet/info.plist
JXA_SET_CONFIG = 'Application({app}).setConfiguration({arg}, {opts});'
# Delete a variable from the workflow configuration sheet/info.plist
JXA_UNSET_CONFIG = 'Application({app}).removeConfiguration({arg}, {opts});'
# Tell Alfred to reload a workflow from disk
JXA_RELOAD_WORKFLOW = 'Application({app}).reloadWorkflow({arg});'
class AcquisitionError(Exception):
"""Raised if a lock cannot be acquired."""
AppInfo = namedtuple('AppInfo', ['name', 'path', 'bundleid'])
"""Information about an installed application.
Returned by :func:`appinfo`. All attributes are str.
.. py:attribute:: name
Name of the application, e.g. ``'Safari'``.
.. py:attribute:: path
Path to the application bundle, e.g. ``'/Applications/Safari.app'``.
.. py:attribute:: bundleid
Application's bundle ID, e.g. ``'com.apple.Safari'``.
"""
def jxa_app_name():
"""Return name of application to call currently running Alfred.
.. versionadded: 1.37
    .. versionchanged:: 2.0
        Returns 'com.runningwithcrayons.Alfred', the bundle ID of Alfred 4+ versions.
This name is suitable for use with ``Application(name)`` in JXA.
Returns:
str: bundleID.
"""
# Alfred 4+
return 'com.runningwithcrayons.Alfred'
def unicodify(s, encoding='utf-8', norm=None):
"""Ensure string is Unicode.
.. versionadded:: 1.31
Decode encoded strings using ``encoding`` and normalise Unicode
to form ``norm`` if specified.
Args:
s (str): String to decode. May also be Unicode.
encoding (str, optional): Encoding to use on bytestrings.
norm (None, optional): Normalisation form to apply to Unicode string.
Returns:
str: Decoded, optionally normalised, Unicode string.
"""
if not isinstance(s, str):
s = str(s, encoding)
if norm:
from unicodedata import normalize
s = normalize(norm, s)
return s
def applescriptify(s):
"""Escape string for insertion into an AppleScript string.
.. versionadded:: 1.31
Replaces ``"`` with `"& quote &"`. Use this function if you want
to insert a string into an AppleScript script:
>>> applescriptify('g "python" test')
'g " & quote & "python" & quote & "test'
Args:
s (str): Unicode string to escape.
Returns:
str: Escaped string.
"""
return s.replace('"', '" & quote & "')
def run_command(cmd, **kwargs):
"""Run a command and return the output.
.. versionadded:: 1.31
A thin wrapper around :func:`subprocess.check_output` that ensures
all arguments are encoded to UTF-8 first.
Args:
cmd (list): Command arguments to pass to :func:`~subprocess.check_output`.
**kwargs: Keyword arguments to pass to :func:`~subprocess.check_output`.
Returns:
str: Output returned by :func:`~subprocess.check_output`.
"""
cmd = [str(s) for s in cmd]
return subprocess.check_output(cmd, **kwargs)
def run_applescript(script, *args, **kwargs):
"""Execute an AppleScript script and return its output.
.. versionadded:: 1.31
Run AppleScript either by filepath or code. If ``script`` is a valid
filepath, that script will be run, otherwise ``script`` is treated
as code.
Args:
script (str, optional): Filepath of script or code to run.
*args: Optional command-line arguments to pass to the script.
**kwargs: Pass ``lang`` to run a language other than AppleScript.
Any other keyword arguments are passed to :func:`run_command`.
Returns:
str: Output of run command.
"""
lang = 'AppleScript'
if 'lang' in kwargs:
lang = kwargs['lang']
del kwargs['lang']
cmd = ['/usr/bin/osascript', '-l', lang]
if os.path.exists(script):
cmd += [script]
else:
cmd += ['-e', script]
cmd.extend(args)
return run_command(cmd, **kwargs)
def run_jxa(script, *args):
"""Execute a JXA script and return its output.
.. versionadded:: 1.31
Wrapper around :func:`run_applescript` that passes ``lang=JavaScript``.
Args:
script (str): Filepath of script or code to run.
*args: Optional command-line arguments to pass to script.
Returns:
str: Output of script.
"""
return run_applescript(script, *args, lang='JavaScript')
def run_trigger(name, bundleid=None, arg=None):
"""Call an Alfred External Trigger.
.. versionadded:: 1.31
If ``bundleid`` is not specified, the bundle ID of the calling
workflow is used.
Args:
name (str): Name of External Trigger to call.
bundleid (str, optional): Bundle ID of workflow trigger belongs to.
arg (str, optional): Argument to pass to trigger.
"""
bundleid = bundleid or os.getenv('alfred_workflow_bundleid')
appname = jxa_app_name()
opts = {'inWorkflow': bundleid}
if arg:
opts['withArgument'] = arg
script = JXA_TRIGGER.format(app=json.dumps(appname),
arg=json.dumps(name),
opts=json.dumps(opts, sort_keys=True))
run_applescript(script, lang='JavaScript')
def set_theme(theme_name):
"""Change Alfred's theme.
.. versionadded:: 1.39.0
Args:
theme_name (str): Name of theme Alfred should use.
"""
appname = jxa_app_name()
script = JXA_SET_THEME.format(app=json.dumps(appname),
arg=json.dumps(theme_name))
run_applescript(script, lang='JavaScript')
def set_config(name, value, bundleid=None, exportable=False):
"""Set a workflow variable in ``info.plist``.
.. versionadded:: 1.33
If ``bundleid`` is not specified, the bundle ID of the calling
workflow is used.
Args:
name (str): Name of variable to set.
value (str): Value to set variable to.
bundleid (str, optional): Bundle ID of workflow variable belongs to.
exportable (bool, optional): Whether variable should be marked
as exportable (Don't Export checkbox).
"""
bundleid = bundleid or os.getenv('alfred_workflow_bundleid')
appname = jxa_app_name()
opts = {
'toValue': value,
'inWorkflow': bundleid,
'exportable': exportable,
}
script = JXA_SET_CONFIG.format(app=json.dumps(appname),
arg=json.dumps(name),
opts=json.dumps(opts, sort_keys=True))
run_applescript(script, lang='JavaScript')
def unset_config(name, bundleid=None):
"""Delete a workflow variable from ``info.plist``.
.. versionadded:: 1.33
If ``bundleid`` is not specified, the bundle ID of the calling
workflow is used.
Args:
name (str): Name of variable to delete.
bundleid (str, optional): Bundle ID of workflow variable belongs to.
"""
bundleid = bundleid or os.getenv('alfred_workflow_bundleid')
appname = jxa_app_name()
opts = {'inWorkflow': bundleid}
script = JXA_UNSET_CONFIG.format(app=json.dumps(appname),
arg=json.dumps(name),
opts=json.dumps(opts, sort_keys=True))
run_applescript(script, lang='JavaScript')
def search_in_alfred(query=None):
"""Open Alfred with given search query.
.. versionadded:: 1.39.0
Omit ``query`` to simply open Alfred's main window.
Args:
query (str, optional): Search query.
"""
query = query or ''
appname = jxa_app_name()
script = JXA_SEARCH.format(app=json.dumps(appname), arg=json.dumps(query))
run_applescript(script, lang='JavaScript')
def browse_in_alfred(path):
"""Open Alfred's filesystem navigation mode at ``path``.
.. versionadded:: 1.39.0
Args:
path (str): File or directory path.
"""
appname = jxa_app_name()
script = JXA_BROWSE.format(app=json.dumps(appname), arg=json.dumps(path))
run_applescript(script, lang='JavaScript')
def action_in_alfred(paths):
"""Action the give filepaths in Alfred.
.. versionadded:: 1.39.0
Args:
paths (list): Unicode paths to files/directories to action.
"""
appname = jxa_app_name()
script = JXA_ACTION.format(app=json.dumps(appname), arg=json.dumps(paths))
run_applescript(script, lang='JavaScript')
def reload_workflow(bundleid=None):
"""Tell Alfred to reload a workflow from disk.
.. versionadded:: 1.39.0
If ``bundleid`` is not specified, the bundle ID of the calling
workflow is used.
Args:
bundleid (str, optional): Bundle ID of workflow to reload.
"""
bundleid = bundleid or os.getenv('alfred_workflow_bundleid')
appname = jxa_app_name()
script = JXA_RELOAD_WORKFLOW.format(app=json.dumps(appname),
arg=json.dumps(bundleid))
run_applescript(script, lang='JavaScript')
def appinfo(name):
"""Get information about an installed application.
.. versionadded:: 1.31
Args:
name (str): Name of application to look up.
Returns:
AppInfo: :class:`AppInfo` tuple or ``None`` if app isn't found.
"""
cmd = [
'mdfind',
'-onlyin', '/Applications',
'-onlyin', '/System/Applications',
'-onlyin', os.path.expanduser('~/Applications'),
'(kMDItemContentTypeTree == com.apple.application &&'
'(kMDItemDisplayName == "{0}" || kMDItemFSName == "{0}.app"))'
.format(name)
]
output = run_command(cmd).strip()
if not output:
return None
path = str(output, 'utf-8').split('\n')[0]
cmd = ['mdls', '-raw', '-name', 'kMDItemCFBundleIdentifier', path]
bid = run_command(cmd).strip()
if not bid: # pragma: no cover
return None
return AppInfo(unicodify(name), unicodify(path), unicodify(bid))
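# Example (assuming Safari is installed; values as in the AppInfo docs above):
#     appinfo('Safari')
#     # -> AppInfo(name='Safari', path='/Applications/Safari.app',
#     #            bundleid='com.apple.Safari')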
@contextmanager
def atomic_writer(fpath, mode):
"""Atomic file writer.
.. versionadded:: 1.12
Context manager that ensures the file is only written if the write
succeeds. The data is first written to a temporary file.
:param fpath: path of file to write to.
:type fpath: ``str``
    :param mode: same as for :func:`open`
:type mode: string
"""
suffix = '.{}.tmp'.format(os.getpid())
temppath = fpath + suffix
with open(temppath, mode) as fp:
try:
yield fp
os.rename(temppath, fpath)
finally:
try:
os.remove(temppath)
except (OSError, IOError):
pass
class LockFile(object):
"""Context manager to protect filepaths with lockfiles.
.. versionadded:: 1.13
Creates a lockfile alongside ``protected_path``. Other ``LockFile``
instances will refuse to lock the same path.
>>> path = '/path/to/file'
>>> with LockFile(path):
>>> with open(path, 'wb') as fp:
>>> fp.write(data)
Args:
protected_path (str): File to protect with a lockfile
timeout (float, optional): Raises an :class:`AcquisitionError`
if lock cannot be acquired within this number of seconds.
If ``timeout`` is 0 (the default), wait forever.
delay (float, optional): How often to check (in seconds) if
lock has been released.
Attributes:
delay (float): How often to check (in seconds) whether the lock
can be acquired.
lockfile (str): Path of the lockfile.
timeout (float): How long to wait to acquire the lock.
"""
def __init__(self, protected_path, timeout=0.0, delay=0.05):
"""Create new :class:`LockFile` object."""
self.lockfile = protected_path + '.lock'
self._lockfile = None
self.timeout = timeout
self.delay = delay
self._lock = Event()
atexit.register(self.release)
@property
def locked(self):
"""``True`` if file is locked by this instance."""
return self._lock.is_set()
def acquire(self, blocking=True):
"""Acquire the lock if possible.
If the lock is in use and ``blocking`` is ``False``, return
``False``.
        Otherwise, check every :attr:`delay` seconds until it acquires
        the lock or exceeds :attr:`timeout` and raises an :class:`AcquisitionError`.
"""
if self.locked and not blocking:
return False
start = time.time()
while True:
# Raise error if we've been waiting too long to acquire the lock
if self.timeout and (time.time() - start) >= self.timeout:
raise AcquisitionError('lock acquisition timed out')
# If already locked, wait then try again
if self.locked:
time.sleep(self.delay)
continue
# Create in append mode so we don't lose any contents
if self._lockfile is None:
self._lockfile = open(self.lockfile, 'a')
# Try to acquire the lock
try:
fcntl.lockf(self._lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
self._lock.set()
break
except IOError as err: # pragma: no cover
if err.errno not in (errno.EACCES, errno.EAGAIN):
raise
# Don't try again
if not blocking: # pragma: no cover
return False
# Wait, then try again
time.sleep(self.delay)
return True
def release(self):
"""Release the lock by deleting `self.lockfile`."""
if not self._lock.is_set():
return False
try:
fcntl.lockf(self._lockfile, fcntl.LOCK_UN)
except IOError: # pragma: no cover
pass
finally:
self._lock.clear()
self._lockfile = None
try:
os.unlink(self.lockfile)
except (IOError, OSError): # pragma: no cover
pass
return True
def __enter__(self):
"""Acquire lock."""
self.acquire()
return self
def __exit__(self, typ, value, traceback):
"""Release lock."""
self.release()
def __del__(self):
"""Clear up `self.lockfile`."""
self.release() # pragma: no cover
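# --- Illustrative usage sketch (added; not part of the original module) ----
# A non-blocking acquire: try the lock once and back off instead of
# waiting, which suits Script Filters that re-run frequently. The path is
# an assumed example.
def _example_try_lock(path):
    """Hypothetical helper: append to ``path`` only if its lock is free."""
    lock = LockFile(path, timeout=0.5)
    if not lock.acquire(blocking=False):
        return False  # another process holds the lock
    try:
        with open(path, 'a') as fp:
            fp.write('locked write\n')
    finally:
        lock.release()
    return True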
class uninterruptible(object):
"""Decorator that postpones SIGTERM until wrapped function returns.
.. versionadded:: 1.12
.. important:: This decorator is NOT thread-safe.
As of version 2.7, Alfred allows Script Filters to be killed. If
your workflow is killed in the middle of critical code (e.g.
writing data to disk), this may corrupt your workflow's data.
Use this decorator to wrap critical functions that *must* complete.
If the script is killed while a wrapped function is executing,
the SIGTERM will be caught and handled after your function has
finished executing.
Alfred-PyWorkflow uses this internally to ensure its settings, data
and cache writes complete.
"""
def __init__(self, func, class_name=''):
"""Decorate `func`."""
self.func = func
functools.update_wrapper(self, func)
self._caught_signal = None
def signal_handler(self, signum, frame):
"""Called when process receives SIGTERM."""
self._caught_signal = (signum, frame)
def __call__(self, *args, **kwargs):
"""Trap ``SIGTERM`` and call wrapped function."""
self._caught_signal = None
# Register handler for SIGTERM, then call `self.func`
self.old_signal_handler = signal.getsignal(signal.SIGTERM)
signal.signal(signal.SIGTERM, self.signal_handler)
self.func(*args, **kwargs)
# Restore old signal handler
signal.signal(signal.SIGTERM, self.old_signal_handler)
# Handle any signal caught during execution
if self._caught_signal is not None:
signum, frame = self._caught_signal
if callable(self.old_signal_handler):
self.old_signal_handler(signum, frame)
elif self.old_signal_handler == signal.SIG_DFL:
sys.exit(0)
def __get__(self, obj=None, klass=None):
"""Decorator API."""
return self.__class__(self.func.__get__(obj, klass),
klass.__name__)
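# --- Illustrative usage sketch (added; not part of the original module) ----
# Wrapping a critical write so that a SIGTERM arriving mid-write is
# deferred until the function returns; combines naturally with
# ``atomic_writer`` above. The path is an assumed example.
@uninterruptible
def _example_critical_write(path):
    """Hypothetical critical section protected from SIGTERM."""
    with atomic_writer(path, 'w') as fp:
        json.dump({'state': 'consistent'}, fp)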
|
Alfred-PyWorkflow
|
/Alfred-PyWorkflow-2.0.0b0.tar.gz/Alfred-PyWorkflow-2.0.0b0/workflow/util.py
|
util.py
|
import os
import plistlib
import shutil
import subprocess
import sys
import tempfile
import time
from datetime import timedelta
import workflow
_wf = None
_log = None
#: Available system sounds from System Preferences > Sound > Sound Effects
# (location: ``/System/Library/Sounds``)
SOUNDS = (
'Basso',
'Blow',
'Bottle',
'Frog',
'Funk',
'Glass',
'Hero',
'Morse',
'Ping',
'Pop',
'Purr',
'Sosumi',
'Submarine',
'Tink',
)
def wf():
"""Return Workflow object for this module.
Returns:
workflow.Workflow: Workflow object for current workflow.
"""
global _wf
if _wf is None:
_wf = workflow.Workflow()
return _wf
def log():
"""Return logger for this module.
Returns:
logging.Logger: Logger for this module.
"""
global _log
if _log is None:
_log = wf().logger
return _log
def notificator_name():
"""Notificator name from Alfred's workflow name.
i.e. ``Notificator for <workflow name>.app``.
:returns: notificator name
:rtype: ``str``
"""
return f'Notificator for {wf().name}.app'
def notificator_program():
"""Return path to Notificator applet executable.
Returns:
str: Path to the ``applet`` executable inside the Notificator app bundle.
"""
return wf().cachefile(f'{notificator_name()}/Contents/MacOS/applet')
def notificator_icon_path():
"""Return path to icon file in installed ``Notificator for `~workflow.name`.app``.
Returns:
str: Path to ``applet.icns`` within the app bundle.
"""
return wf().cachefile(f'{notificator_name()}/Contents/Resources/applet.icns')
def install_notificator():
"""Build the ``Notificator for `~workflow.name`.app`` from the workflow to cache directory.
Changes the bundle ID of the installed app and gives it the
workflow's icon.
"""
jxa_script = '''
// Build argv/argc in a way that can be used from the applet inside the app bundle
ObjC.import("Foundation")
const args = $.NSProcessInfo.processInfo.arguments
const argv = []
const argc = args.count
for (let i = 0; i < argc; i++) { argv.push(ObjC.unwrap(args.objectAtIndex(i))) }
// Notification script
const app = Application.currentApplication()
app.includeStandardAdditions = true
if (argv.length < 2) { // We use "2" because the script will always see at least one argument: the applet itself
argv[1] = "Opening usage instructions…"
argv[2] = "Notificator is a command-line app"
argv[4] = "Funk"
app.openLocation("https://github.com/vitorgalvao/notificator#usage")
}
const message = argv[1]
const title = argv[2]
const subtitle = argv[3]
const sound = argv[4]
const options = {}
if (title) options.withTitle = title
if (subtitle) options.subtitle = subtitle
if (sound) options.soundName = sound
app.displayNotification(message, options)
'''
destdir = wf().cachedir
app_name = notificator_name()
app_path = os.path.join(destdir, app_name)
log().debug(f'installing "{app_name}" to {destdir} ...')
cmd = [
'osacompile',
'-l', 'JavaScript',
'-o', app_path,
'-e', jxa_script
]
retcode = subprocess.call(cmd)
if retcode != 0: # pragma: nocover
raise RuntimeError(f'osacompile exited with {retcode}')
n = notificator_program()
if not os.path.exists(n): # pragma: nocover
raise RuntimeError(f'{app_name} could not be installed in {destdir}')
# Replace applet icon
icon = notificator_icon_path()
workflow_icon = wf().workflowfile('icon.png')
if os.path.exists(icon):
os.unlink(icon)
png_to_icns(workflow_icon, icon)
# Modify Notificator, change bundle ID of installed app
ip_path = os.path.join(app_path, 'Contents/Info.plist')
#bundle_id = f'{wf().bundleid}.{uuid.uuid4().hex}'
bundle_id = wf().bundleid
with open(ip_path, 'rb') as fp:
data = plistlib.load(fp)
log().debug('changing bundle ID to %r', bundle_id)
data['CFBundleIdentifier'] = bundle_id
data['LSUIElement'] = '1'
with open(ip_path, 'wb') as fp:
plistlib.dump(data, fp)
# Redo signature
cmd = [
'codesign',
'--remove-signature', app_path
]
retcode = subprocess.call(cmd)
if retcode != 0: # pragma: nocover
raise RuntimeError(f'codesign remove-signature exited with {retcode}')
cmd = [
'codesign',
'--sign', '-', app_path
]
retcode = subprocess.call(cmd)
if retcode != 0: # pragma: nocover
raise RuntimeError(f'codesign sign exited with {retcode}')
def validate_sound(sound):
"""Coerce ``sound`` to valid sound name.
Returns ``None`` for invalid sounds. Sound names can be found
in ``System Preferences > Sound > Sound Effects`` or located at ``/System/Library/Sounds``.
Args:
sound (str): Name of system sound.
Returns:
str: Proper name of sound or ``None``.
"""
if not sound:
return None
# Case-insensitive comparison of `sound`
if sound.lower() in [s.lower() for s in SOUNDS]:
# Title-case is correct for all system sounds as of macOS 10.11
return sound.title()
return None
def notify(title='', subtitle='', message='', sound=None):
"""Post notification via notificator helper app from Vítor Galvão.
Args:
title (str, optional): Notification title.
subtitle (str, optional): Notification subtitle.
message (str): Notification body text.
sound (str, optional): Name of sound to play.
Raises:
ValueError: Raised if ``message`` is empty.
Returns:
bool: ``True`` if notification was posted, else ``False``.
"""
if message == '':
raise ValueError('Empty notification message')
sound = validate_sound(sound) or ''
n = notificator_program()
# Install if Notificator does not exist or was modified more than 30 days ago
if (not os.path.exists(n)) or timedelta(seconds=time.time() - os.path.getmtime(n)).days >= 30:
install_notificator()
cmd = [
n,
message,
title,
subtitle,
sound
]
retcode = subprocess.call(cmd)
if retcode == 0:
return True
log().error('Notificator exited with status {0}.'.format(retcode))
return False
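# --- Illustrative usage sketch (added; not part of the original module) ----
# Posting a notification. The first call may be slow because the
# Notificator applet is (re)built on demand; an invalid ``sound`` name is
# silently dropped by ``validate_sound()``.
def _example_notify():
    """Hypothetical helper demonstrating ``notify()``."""
    return notify(title='My Workflow', message='Job finished', sound='Glass')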
def convert_image(inpath, outpath, size):
"""Convert an image file using ``sips``.
Args:
inpath (str): Path of source file.
outpath (str): Path to destination file.
size (int): Width and height of destination image in pixels.
Raises:
RuntimeError: Raised if ``sips`` exits with non-zero status.
"""
cmd = [
'sips',
'--resampleHeightWidth', str(size), str(size),
inpath,
'--out', outpath]
# log().debug(cmd)
with open(os.devnull, 'w') as pipe:
retcode = subprocess.call(cmd, stdout=pipe, stderr=subprocess.STDOUT)
if retcode != 0:
raise RuntimeError('sips exited with %d' % retcode)
def png_to_icns(png_path, icns_path):
"""Convert PNG file to ICNS using ``iconutil``.
Create an iconset from the source PNG file. Generate PNG files
in each size required by macOS, then call ``iconutil`` to turn
them into a single ICNS file.
Args:
png_path (str): Path to source PNG file.
icns_path (str): Path to destination ICNS file.
Raises:
RuntimeError: Raised if ``iconutil`` or ``sips`` fail.
"""
tempdir = tempfile.mkdtemp(prefix='aw-', dir=wf().cachedir)
try:
iconset = os.path.join(tempdir, 'icon.iconset')
if os.path.exists(iconset): # pragma: nocover
raise RuntimeError('iconset already exists: ' + iconset)
os.makedirs(iconset)
# Copy source icon to icon set and generate all the other
# sizes needed
configs = []
for i in (16, 32, 64, 128, 256, 512):
configs.append((f'icon_{i}x{i}.png', i))
configs.append((f'icon_{i}x{i}@2x.png', i * 2))
for name, size in configs:
outpath = os.path.join(iconset, name)
if os.path.exists(outpath): # pragma: nocover
continue
convert_image(png_path, outpath, size)
cmd = [
'iconutil',
'--convert', 'icns',
'--output', icns_path,
iconset]
retcode = subprocess.call(cmd)
if retcode != 0:
raise RuntimeError(f'iconutil exited with {retcode}')
if not os.path.exists(icns_path): # pragma: nocover
raise ValueError(f'generated ICNS file not found: {icns_path}')
finally:
try:
shutil.rmtree(tempdir)
except OSError: # pragma: no cover
pass
if __name__ == '__main__': # pragma: nocover
# Simple command-line script to test module with
import argparse
from unicodedata import normalize
def ustr(s):
    """Coerce `s` to normalised Unicode (CLI args are already str on Python 3)."""
    if isinstance(s, bytes):
        s = s.decode('utf-8')
    return normalize('NFD', s)
p = argparse.ArgumentParser()
p.add_argument('-p', '--png', help="PNG image to convert to ICNS.")
p.add_argument('-l', '--list-sounds', help="Show available sounds.",
action='store_true')
p.add_argument('-t', '--title',
help="Notification title.", type=ustr,
default='')
p.add_argument('-s', '--sound', type=ustr,
help="Optional notification sound.", default='')
p.add_argument('text', type=ustr,
help="Notification body text.", default='', nargs='?')
o = p.parse_args()
# List available sounds
if o.list_sounds:
for sound in SOUNDS:
print(sound)
sys.exit(0)
# Convert PNG to ICNS
if o.png:
icns = os.path.join(
os.path.dirname(o.png),
os.path.splitext(os.path.basename(o.png))[0] + '.icns')
print('converting {0!r} to {1!r} ...'.format(o.png, icns),
file=sys.stderr)
if os.path.exists(icns):
raise ValueError('destination file already exists: ' + icns)
png_to_icns(o.png, icns)
sys.exit(0)
# Post notification
if o.title == o.text == '':
print('ERROR: empty notification.', file=sys.stderr)
sys.exit(1)
else:
notify(o.title, o.text, o.sound)
|
Alfred-PyWorkflow
|
/Alfred-PyWorkflow-2.0.0b0.tar.gz/Alfred-PyWorkflow-2.0.0b0/workflow/notify.py
|
notify.py
|
import os
import pickle
import signal
import subprocess
import sys
from workflow import Workflow
__all__ = ['is_running', 'run_in_background']
_wf = None
def wf():
global _wf
if _wf is None:
_wf = Workflow()
return _wf
def _log():
return wf().logger
def _arg_cache(name):
"""Return path to pickle cache file for arguments.
:param name: name of task
:type name: ``str``
:returns: Path to cache file
:rtype: ``str`` filepath
"""
return wf().cachefile(name + '.argcache')
def _pid_file(name):
"""Return path to PID file for ``name``.
:param name: name of task
:type name: ``str``
:returns: Path to PID file for task
:rtype: ``str`` filepath
"""
return wf().cachefile(name + '.pid')
def _process_exists(pid):
"""Check if a process with PID ``pid`` exists.
:param pid: PID to check
:type pid: ``int``
:returns: ``True`` if process exists, else ``False``
:rtype: ``Boolean``
"""
try:
os.kill(pid, 0)
except OSError: # not running
return False
return True
def _job_pid(name):
"""Get PID of job or `None` if job does not exist.
Args:
name (str): Name of job.
Returns:
int: PID of job process (or `None` if job doesn't exist).
"""
pidfile = _pid_file(name)
if not os.path.exists(pidfile):
return
with open(pidfile, 'r') as fp:
pid = int(fp.read())
if _process_exists(pid):
return pid
os.unlink(pidfile)
def is_running(name):
"""Test whether task ``name`` is currently running.
:param name: name of task
:type name: str
:returns: ``True`` if task with name ``name`` is running, else ``False``
:rtype: bool
"""
if _job_pid(name) is not None:
return True
return False
def _background(pidfile, stdin='/dev/null', stdout='/dev/null',
stderr='/dev/null'): # pragma: no cover
"""Fork the current process into a background daemon.
:param pidfile: file to write PID of daemon process to.
:type pidfile: filepath
:param stdin: where to read input
:type stdin: filepath
:param stdout: where to write stdout output
:type stdout: filepath
:param stderr: where to write stderr output
:type stderr: filepath
"""
def _fork_and_exit_parent(errmsg, wait=False, write=False):
try:
pid = os.fork()
if pid > 0:
if write: # write PID of child process to `pidfile`
tmp = pidfile + '.tmp'
with open(tmp, 'w') as fp:
fp.write(str(pid))
os.rename(tmp, pidfile)
if wait: # wait for child process to exit
os.waitpid(pid, 0)
os._exit(0)
except OSError as err:
_log().critical('%s: (%d) %s', errmsg, err.errno, err.strerror)
raise err
# Do first fork and wait for second fork to finish.
_fork_and_exit_parent('fork #1 failed', wait=True)
# Decouple from parent environment.
os.chdir(wf().workflowdir)
os.setsid()
# Do second fork and write PID to pidfile.
_fork_and_exit_parent('fork #2 failed', write=True)
# Now I am a daemon!
# Redirect standard file descriptors.
si = open(stdin, 'rb', 0)
so = open(stdout, 'ab+', 0)
se = open(stderr, 'ab+', 0)
if hasattr(sys.stdin, 'fileno'):
os.dup2(si.fileno(), sys.stdin.fileno())
if hasattr(sys.stdout, 'fileno'):
os.dup2(so.fileno(), sys.stdout.fileno())
if hasattr(sys.stderr, 'fileno'):
os.dup2(se.fileno(), sys.stderr.fileno())
def kill(name, sig=signal.SIGTERM):
"""Send a signal to job ``name`` via :func:`os.kill`.
.. versionadded:: 1.29
Args:
name (str): Name of the job
sig (int, optional): Signal to send (default: SIGTERM)
Returns:
bool: `False` if job isn't running, `True` if signal was sent.
"""
pid = _job_pid(name)
if pid is None:
return False
os.kill(pid, sig)
return True
def run_in_background(name, args, **kwargs):
r"""Cache arguments then call this script again via :func:`subprocess.call`.
:param name: name of job
:type name: str
:param args: arguments passed as first argument to :func:`subprocess.call`
:param \**kwargs: keyword arguments to :func:`subprocess.call`
:returns: exit code of sub-process
:rtype: int
When you call this function, it caches its arguments and then calls
``background.py`` in a subprocess. The Python subprocess will load the
cached arguments, fork into the background, and then run the command you
specified.
This function will return as soon as the ``background.py`` subprocess has
forked, returning the exit code of *that* process (i.e. not of the command
you're trying to run).
If that process fails, an error will be written to the log file.
If a process is already running under the same name, this function will
return immediately and will not run the specified command.
"""
if is_running(name):
_log().info('[%s] job already running', name)
return
argcache = _arg_cache(name)
# Cache arguments
with open(argcache, 'wb') as fp:
pickle.dump({'args': args, 'kwargs': kwargs}, fp)
_log().debug('[%s] command cached: %s', name, argcache)
# Call this script in module mode because of relative imports
cmd = ['/usr/bin/env', 'python3', '-m', 'workflow.background', name]
_log().debug('[%s] passing job to background runner: %r', name, cmd)
retcode = subprocess.call(cmd)
if retcode: # pragma: no cover
_log().error('[%s] background runner (%r) failed with %d', name, cmd, retcode)
else:
_log().debug('[%s] background job started', name)
return retcode
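# --- Illustrative usage sketch (added; not part of the original module) ----
# Typical Script Filter pattern: start a long-running update under a job
# name unless one is already running. ``update.py`` is an assumed script.
def _example_start_update():
    """Hypothetical helper demonstrating ``run_in_background()``."""
    if not is_running('update'):
        run_in_background('update', ['/usr/bin/env', 'python3', 'update.py'])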
def main(wf): # pragma: no cover
"""Run command in a background process.
Load cached arguments, fork into background, then call
:meth:`subprocess.call` with cached arguments.
"""
log = wf.logger
name = wf.args[0]
argcache = _arg_cache(name)
if not os.path.exists(argcache):
msg = '[{0}] command cache not found: {1}'.format(name, argcache)
log.critical(msg)
raise IOError(msg)
# Fork to background and run command
pidfile = _pid_file(name)
_background(pidfile)
# Load cached arguments
with open(argcache, 'rb') as fp:
data = pickle.load(fp)
# Cached arguments
args = data['args']
kwargs = data['kwargs']
# Delete argument cache file
os.unlink(argcache)
try:
# Run the command
log.debug('[%s] running command: %r', name, args)
retcode = subprocess.call(args, **kwargs)
if retcode:
log.error('[%s] command failed with status %d', name, retcode)
finally:
os.unlink(pidfile)
log.debug('[%s] job complete', name)
if __name__ == '__main__': # pragma: no cover
wf().run(main)
|
Alfred-PyWorkflow
|
/Alfred-PyWorkflow-2.0.0b0.tar.gz/Alfred-PyWorkflow-2.0.0b0/workflow/background.py
|
background.py
|
import binascii
import json
import logging
import logging.handlers
import os
import pickle
import plistlib
import re
import shutil
import string
import subprocess
import sys
import time
import unicodedata
from copy import deepcopy
# imported to maintain API
from workflow.util import LockFile, atomic_writer, uninterruptible
#: Sentinel for properties that haven't been set yet (that might
#: correctly have the value ``None``)
UNSET = object()
####################################################################
# Standard system icons
####################################################################
# These icons are default macOS icons. They are super-high quality, and
# will be familiar to users.
# This library uses `ICON_ERROR` when a workflow dies in flames, so
# in my own workflows, I use `ICON_WARNING` for less fatal errors
# (e.g. bad user input, no results etc.)
# The system icons are all in this directory. There are many more than
# are listed here.
ICON_ROOT = '/System/Library/CoreServices/CoreTypes.bundle/Contents/Resources'
ICON_ACCOUNT = os.path.join(ICON_ROOT, 'Accounts.icns')
ICON_BURN = os.path.join(ICON_ROOT, 'BurningIcon.icns')
ICON_CLOCK = os.path.join(ICON_ROOT, 'Clock.icns')
ICON_COLOR = os.path.join(ICON_ROOT, 'ProfileBackgroundColor.icns')
ICON_COLOUR = ICON_COLOR # Queen's English, if you please
ICON_EJECT = os.path.join(ICON_ROOT, 'EjectMediaIcon.icns')
# Shown when a workflow throws an error
ICON_ERROR = os.path.join(ICON_ROOT, 'AlertStopIcon.icns')
ICON_FAVORITE = os.path.join(ICON_ROOT, 'ToolbarFavoritesIcon.icns')
ICON_FAVOURITE = ICON_FAVORITE
ICON_GROUP = os.path.join(ICON_ROOT, 'GroupIcon.icns')
ICON_HELP = os.path.join(ICON_ROOT, 'HelpIcon.icns')
ICON_HOME = os.path.join(ICON_ROOT, 'HomeFolderIcon.icns')
ICON_INFO = os.path.join(ICON_ROOT, 'ToolbarInfo.icns')
ICON_NETWORK = os.path.join(ICON_ROOT, 'GenericNetworkIcon.icns')
ICON_NOTE = os.path.join(ICON_ROOT, 'AlertNoteIcon.icns')
ICON_SETTINGS = os.path.join(ICON_ROOT, 'ToolbarAdvanced.icns')
ICON_SWIRL = os.path.join(ICON_ROOT, 'ErasingIcon.icns')
ICON_SWITCH = os.path.join(ICON_ROOT, 'General.icns')
ICON_SYNC = os.path.join(ICON_ROOT, 'Sync.icns')
ICON_TRASH = os.path.join(ICON_ROOT, 'TrashIcon.icns')
ICON_USER = os.path.join(ICON_ROOT, 'UserIcon.icns')
ICON_WARNING = os.path.join(ICON_ROOT, 'AlertCautionBadgeIcon.icns')
ICON_WEB = os.path.join(ICON_ROOT, 'BookmarkIcon.icns')
####################################################################
# non-ASCII to ASCII diacritic folding.
# Used by `fold_to_ascii` method
####################################################################
ASCII_REPLACEMENTS = {
'À': 'A',
'Á': 'A',
'Â': 'A',
'Ã': 'A',
'Ä': 'A',
'Å': 'A',
'Æ': 'AE',
'Ç': 'C',
'È': 'E',
'É': 'E',
'Ê': 'E',
'Ë': 'E',
'Ì': 'I',
'Í': 'I',
'Î': 'I',
'Ï': 'I',
'Ð': 'D',
'Ñ': 'N',
'Ò': 'O',
'Ó': 'O',
'Ô': 'O',
'Õ': 'O',
'Ö': 'O',
'Ø': 'O',
'Ù': 'U',
'Ú': 'U',
'Û': 'U',
'Ü': 'U',
'Ý': 'Y',
'Þ': 'Th',
'ß': 'ss',
'à': 'a',
'á': 'a',
'â': 'a',
'ã': 'a',
'ä': 'a',
'å': 'a',
'æ': 'ae',
'ç': 'c',
'è': 'e',
'é': 'e',
'ê': 'e',
'ë': 'e',
'ì': 'i',
'í': 'i',
'î': 'i',
'ï': 'i',
'ð': 'd',
'ñ': 'n',
'ò': 'o',
'ó': 'o',
'ô': 'o',
'õ': 'o',
'ö': 'o',
'ø': 'o',
'ù': 'u',
'ú': 'u',
'û': 'u',
'ü': 'u',
'ý': 'y',
'þ': 'th',
'ÿ': 'y',
'Ł': 'L',
'ł': 'l',
'Ń': 'N',
'ń': 'n',
'Ņ': 'N',
'ņ': 'n',
'Ň': 'N',
'ň': 'n',
'Ŋ': 'NG',
'ŋ': 'ng',
'Ō': 'O',
'ō': 'o',
'Ŏ': 'O',
'ŏ': 'o',
'Ő': 'O',
'ő': 'o',
'Œ': 'OE',
'œ': 'oe',
'Ŕ': 'R',
'ŕ': 'r',
'Ŗ': 'R',
'ŗ': 'r',
'Ř': 'R',
'ř': 'r',
'Ś': 'S',
'ś': 's',
'Ŝ': 'S',
'ŝ': 's',
'Ş': 'S',
'ş': 's',
'Š': 'S',
'š': 's',
'Ţ': 'T',
'ţ': 't',
'Ť': 'T',
'ť': 't',
'Ŧ': 'T',
'ŧ': 't',
'Ũ': 'U',
'ũ': 'u',
'Ū': 'U',
'ū': 'u',
'Ŭ': 'U',
'ŭ': 'u',
'Ů': 'U',
'ů': 'u',
'Ű': 'U',
'ű': 'u',
'Ŵ': 'W',
'ŵ': 'w',
'Ŷ': 'Y',
'ŷ': 'y',
'Ÿ': 'Y',
'Ź': 'Z',
'ź': 'z',
'Ż': 'Z',
'ż': 'z',
'Ž': 'Z',
'ž': 'z',
'ſ': 's',
'Α': 'A',
'Β': 'B',
'Γ': 'G',
'Δ': 'D',
'Ε': 'E',
'Ζ': 'Z',
'Η': 'E',
'Θ': 'Th',
'Ι': 'I',
'Κ': 'K',
'Λ': 'L',
'Μ': 'M',
'Ν': 'N',
'Ξ': 'Ks',
'Ο': 'O',
'Π': 'P',
'Ρ': 'R',
'Σ': 'S',
'Τ': 'T',
'Υ': 'U',
'Φ': 'Ph',
'Χ': 'Kh',
'Ψ': 'Ps',
'Ω': 'O',
'α': 'a',
'β': 'b',
'γ': 'g',
'δ': 'd',
'ε': 'e',
'ζ': 'z',
'η': 'e',
'θ': 'th',
'ι': 'i',
'κ': 'k',
'λ': 'l',
'μ': 'm',
'ν': 'n',
'ξ': 'x',
'ο': 'o',
'π': 'p',
'ρ': 'r',
'ς': 's',
'σ': 's',
'τ': 't',
'υ': 'u',
'φ': 'ph',
'χ': 'kh',
'ψ': 'ps',
'ω': 'o',
'А': 'A',
'Б': 'B',
'В': 'V',
'Г': 'G',
'Д': 'D',
'Е': 'E',
'Ж': 'Zh',
'З': 'Z',
'И': 'I',
'Й': 'I',
'К': 'K',
'Л': 'L',
'М': 'M',
'Н': 'N',
'О': 'O',
'П': 'P',
'Р': 'R',
'С': 'S',
'Т': 'T',
'У': 'U',
'Ф': 'F',
'Х': 'Kh',
'Ц': 'Ts',
'Ч': 'Ch',
'Ш': 'Sh',
'Щ': 'Shch',
'Ъ': "'",
'Ы': 'Y',
'Ь': "'",
'Э': 'E',
'Ю': 'Iu',
'Я': 'Ia',
'а': 'a',
'б': 'b',
'в': 'v',
'г': 'g',
'д': 'd',
'е': 'e',
'ж': 'zh',
'з': 'z',
'и': 'i',
'й': 'i',
'к': 'k',
'л': 'l',
'м': 'm',
'н': 'n',
'о': 'o',
'п': 'p',
'р': 'r',
'с': 's',
'т': 't',
'у': 'u',
'ф': 'f',
'х': 'kh',
'ц': 'ts',
'ч': 'ch',
'ш': 'sh',
'щ': 'shch',
'ъ': "'",
'ы': 'y',
'ь': "'",
'э': 'e',
'ю': 'iu',
'я': 'ia',
'ᴦ': 'G',
'ᴧ': 'L',
'ᴨ': 'P',
'ᴩ': 'R',
'ᴪ': 'PS',
'ẞ': 'Ss',
'Ỳ': 'Y',
'ỳ': 'y',
'Ỵ': 'Y',
'ỵ': 'y',
'Ỹ': 'Y',
'ỹ': 'y',
}
####################################################################
# Smart-to-dumb punctuation mapping
####################################################################
DUMB_PUNCTUATION = {
'‘': "'",
'’': "'",
'‚': "'",
'“': '"',
'”': '"',
'„': '"',
'–': '-',
'—': '-'
}
####################################################################
# Used by `Workflow.filter`
####################################################################
# Anchor characters in a name
#: Characters that indicate the beginning of a "word" in CamelCase
INITIALS = string.ascii_uppercase + string.digits
#: Split on non-letters, numbers
split_on_delimiters = re.compile('[^a-zA-Z0-9]').split
# Match filter flags
#: Match items that start with ``query``
MATCH_STARTSWITH = 1
#: Match items whose capital letters start with ``query``
MATCH_CAPITALS = 2
#: Match items with a component "word" that matches ``query``
MATCH_ATOM = 4
#: Match items whose initials (based on atoms) start with ``query``
MATCH_INITIALS_STARTSWITH = 8
#: Match items whose initials (based on atoms) contain ``query``
MATCH_INITIALS_CONTAIN = 16
#: Combination of :const:`MATCH_INITIALS_STARTSWITH` and
#: :const:`MATCH_INITIALS_CONTAIN`
MATCH_INITIALS = 24
#: Match items if ``query`` is a substring
MATCH_SUBSTRING = 32
#: Match items if all characters in ``query`` appear in the item in order
MATCH_ALLCHARS = 64
#: Combination of all other ``MATCH_*`` constants
MATCH_ALL = 127
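# Illustrative note (added): the MATCH_* values form a bitmask, so rules
# can be OR'd together and passed to ``Workflow.filter()`` via its
# ``match_on`` argument (name taken from the library docs). For example,
# matching only on prefixes and capital letters:
_EXAMPLE_MATCH_ON = MATCH_STARTSWITH | MATCH_CAPITALS  # bitmask value 3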
####################################################################
# Used by `Workflow.check_update`
####################################################################
# Number of days to wait between checking for updates to the workflow
DEFAULT_UPDATE_FREQUENCY = 1
####################################################################
# Keychain access errors
####################################################################
class KeychainError(Exception):
"""Raised for unknown Keychain errors.
Raised by methods :meth:`Workflow.save_password`,
:meth:`Workflow.get_password` and :meth:`Workflow.delete_password`
when ``security`` CLI app returns an unknown error code.
"""
class PasswordNotFound(KeychainError):
"""Password not in Keychain.
Raised by method :meth:`Workflow.get_password` when ``account``
is unknown to the Keychain.
"""
class PasswordExists(KeychainError):
"""Raised when trying to overwrite an existing account password.
You should never receive this error: it is used internally
by the :meth:`Workflow.save_password` method to know if it needs
to delete the old password first (a Keychain implementation detail).
"""
####################################################################
# Helper functions
####################################################################
def isascii(text):
"""Test if ``text`` contains only ASCII characters.
:param text: text to test for ASCII-ness
:type text: ``str``
:returns: ``True`` if ``text`` contains only ASCII characters
:rtype: ``Boolean``
"""
try:
text.encode('ascii')
except UnicodeEncodeError:
return False
return True
####################################################################
# Implementation classes
####################################################################
class SerializerManager(object):
"""Contains registered serializers.
.. versionadded:: 1.8
A configured instance of this class is available at
:attr:`workflow.manager`.
Use :meth:`register()` to register new (or replace
existing) serializers, which you can specify by name when calling
:class:`~workflow.Workflow` data storage methods.
See :ref:`guide-serialization` and :ref:`guide-persistent-data`
for further information.
"""
def __init__(self):
"""Create new SerializerManager object."""
self._serializers = {}
def register(self, name, serializer):
"""Register ``serializer`` object under ``name``.
Raises :class:`AttributeError` if ``serializer`` is invalid.
.. note::
``name`` will be used as the file extension of the saved files.
:param name: Name to register ``serializer`` under
:type name: ``str``
:param serializer: object with ``load()`` and ``dump()``
methods
"""
# Basic validation
getattr(serializer, 'load')
getattr(serializer, 'dump')
self._serializers[name] = serializer
def serializer(self, name):
"""Return serializer object for ``name``.
:param name: Name of serializer to return
:type name: ``str`` or ``bytes``
:returns: serializer object or ``None`` if no such serializer
is registered.
"""
return self._serializers.get(name)
def unregister(self, name):
"""Remove registered serializer with ``name``.
Raises a :class:`ValueError` if there is no such registered
serializer.
:param name: Name of serializer to remove
:type name: ``str`` or ``bytes``
:returns: serializer object
"""
if name not in self._serializers:
raise ValueError('No such serializer registered: {0}'.format(
name))
serializer = self._serializers[name]
del self._serializers[name]
return serializer
@property
def serializers(self):
"""Return names of registered serializers."""
return sorted(self._serializers.keys())
class JSONSerializer(object):
"""Wrapper around :mod:`json`. Sets ``indent`` and ``encoding``.
.. versionadded:: 1.8
Use this serializer if you need readable data files. JSON doesn't
support Python objects as well as ``pickle``, so be
careful which data you try to serialize as JSON.
"""
@classmethod
def load(cls, file_obj):
"""Load serialized object from open JSON file.
.. versionadded:: 1.8
:param file_obj: file handle
:type file_obj: ``file`` object
:returns: object loaded from JSON file
:rtype: object
"""
return json.load(file_obj)
@classmethod
def dump(cls, obj, file_obj):
"""Serialize object ``obj`` to open JSON file.
.. versionadded:: 1.8
:param obj: Python object to serialize
:type obj: JSON-serializable data structure
:param file_obj: file handle
:type file_obj: ``file`` object
"""
file_obj.write(bytes(json.dumps(obj, indent=2), 'utf-8'))
class PickleSerializer(object):
"""Wrapper around :mod:`pickle`. Sets ``protocol``.
.. versionadded:: 1.8
Use this serializer if you need to add custom pickling.
"""
@classmethod
def load(cls, file_obj):
"""Load serialized object from open pickle file.
.. versionadded:: 1.8
:param file_obj: file handle
:type file_obj: ``file`` object
:returns: object loaded from pickle file
:rtype: object
"""
return pickle.load(file_obj)
@classmethod
def dump(cls, obj, file_obj):
"""Serialize object ``obj`` to open pickle file.
.. versionadded:: 1.8
:param obj: Python object to serialize
:type obj: Python object
:param file_obj: file handle
:type file_obj: ``file`` object
"""
return pickle.dump(obj, file_obj, protocol=-1)
# Set up default manager and register built-in serializers
manager = SerializerManager()
manager.register('pickle', PickleSerializer)
manager.register('json', JSONSerializer)
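# --- Illustrative usage sketch (added; not part of the original module) ----
# Any object with ``load()`` and ``dump()`` methods can be registered as a
# serializer; the registered name doubles as the file extension. A minimal
# plain-text example (hypothetical format):
class _ExampleTextSerializer(object):
    """Hypothetical serializer that stores a single string per file."""

    @classmethod
    def load(cls, file_obj):
        return file_obj.read()

    @classmethod
    def dump(cls, obj, file_obj):
        file_obj.write(str(obj))

manager.register('txt', _ExampleTextSerializer)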
class Variables(dict):
"""Workflow variables for Run Script actions.
.. versionadded:: 1.26
This class allows you to set workflow variables from
Run Script actions.
It is a subclass of :class:`dict`.
>>> v = Variables(username='deanishe', password='hunter2')
>>> v.arg = 'output value'
>>> print(v)
See :ref:`variables-run-script` in the User Guide for more
information.
Args:
arg (str or list, optional): Main output/``{query}``.
**variables: Workflow variables to set.
In Alfred 4.1+ and Alfred-PyWorkflow 1.40+, ``arg`` may also be a
:class:`list` or :class:`tuple`.
Attributes:
arg (str or list): Output value (``{query}``).
In Alfred 4.1+ and Alfred-PyWorkflow 1.40+, ``arg`` may also be a
:class:`list` or :class:`tuple`.
config (dict): Configuration for downstream workflow element.
"""
def __init__(self, arg=None, **variables):
"""Create a new `Variables` object."""
self.arg = arg
self.config = {}
super(Variables, self).__init__(**variables)
@property
def obj(self):
"""``alfredworkflow`` :class:`dict`."""
o = {}
if self:
d2 = {}
for k, v in self.items():
d2[k] = v
o['variables'] = d2
if self.config:
o['config'] = self.config
if self.arg is not None:
o['arg'] = self.arg
return {'alfredworkflow': o}
def __str__(self):
"""Convert to ``alfredworkflow`` JSON object.
Returns:
str: ``alfredworkflow`` JSON object
"""
if not self and not self.config:
if not self.arg:
return ''
if isinstance(self.arg, str):
return self.arg
return json.dumps(self.obj)
def __bytes__(self):
"""Convert to ``alfredworkflow`` JSON object.
Returns:
bytes: UTF-8 encoded ``alfredworkflow`` JSON object
"""
return bytes(str(self), 'utf-8')
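# --- Illustrative usage sketch (added; not part of the original module) ----
# From a Run Script action, printing a ``Variables`` instance emits the
# ``alfredworkflow`` JSON object that passes variables downstream:
def _example_emit_variables():
    """Hypothetical helper demonstrating ``Variables``."""
    v = Variables(username='deanishe')
    v.arg = 'output value'
    print(v)  # -> {"alfredworkflow": {"arg": ..., "variables": {...}}}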
class Modifier(object):
"""Modify :class:`Item` arg/icon/variables when modifier key is pressed.
Don't use this class directly (as it won't be associated with any
:class:`Item`), but rather use :meth:`Item.add_modifier()`
to add modifiers to results.
>>> it = wf.add_item('Title', 'Subtitle', valid=True)
>>> it.setvar('name', 'default')
>>> m = it.add_modifier('cmd')
>>> m.setvar('name', 'alternate')
See :ref:`workflow-variables` in the User Guide for more information
and :ref:`example usage <example-variables>`.
Args:
key (str): Modifier key, e.g. ``"cmd"``, ``"alt"`` etc.
subtitle (str, optional): Override default subtitle.
arg (str, optional): Argument to pass for this modifier.
valid (bool, optional): Override item's validity.
icon (str, optional): Filepath/UTI of icon to use
icontype (str, optional): Type of icon. See
:meth:`Workflow.add_item() <workflow.Workflow.add_item>`
for valid values.
Attributes:
arg (str): Arg to pass to following action.
config (dict): Configuration for a downstream element, such as
a File Filter.
icon (str): Filepath/UTI of icon.
icontype (str): Type of icon. See
:meth:`Workflow.add_item() <workflow.Workflow.add_item>`
for valid values.
key (str): Modifier key (see above).
subtitle (str): Override item subtitle.
valid (bool): Override item validity.
variables (dict): Workflow variables set by this modifier.
"""
def __init__(self, key, subtitle=None, arg=None, valid=None, icon=None,
icontype=None):
"""Create a new :class:`Modifier`.
Don't use this class directly (as it won't be associated with any
:class:`Item`), but rather use :meth:`Item.add_modifier()`
to add modifiers to results.
Args:
key (str): Modifier key, e.g. ``"cmd"``, ``"alt"`` etc.
subtitle (str, optional): Override default subtitle.
arg (str, optional): Argument to pass for this modifier.
valid (bool, optional): Override item's validity.
icon (str, optional): Filepath/UTI of icon to use
icontype (str, optional): Type of icon. See
:meth:`Workflow.add_item() <workflow.Workflow.add_item>`
for valid values.
"""
self.key = key
self.subtitle = subtitle
self.arg = arg
self.valid = valid
self.icon = icon
self.icontype = icontype
self.config = {}
self.variables = {}
def setvar(self, name, value):
"""Set a workflow variable for this Item.
Args:
name (str): Name of variable.
value (str): Value of variable.
"""
self.variables[name] = value
def getvar(self, name, default=None):
"""Return value of workflow variable for ``name`` or ``default``.
Args:
name (str): Variable name.
default (None, optional): Value to return if variable is unset.
Returns:
str or ``default``: Value of variable if set or ``default``.
"""
return self.variables.get(name, default)
@property
def obj(self):
"""Modifier formatted for JSON serialization for Alfred 3.
Returns:
dict: Modifier for serializing to JSON.
"""
o = {}
if self.subtitle is not None:
o['subtitle'] = self.subtitle
if self.arg is not None:
o['arg'] = self.arg
if self.valid is not None:
o['valid'] = self.valid
if self.variables:
o['variables'] = self.variables
if self.config:
o['config'] = self.config
icon = self._icon()
if icon:
o['icon'] = icon
return o
def _icon(self):
"""Return `icon` object for item.
Returns:
dict: Mapping for item `icon` (may be empty).
"""
icon = {}
if self.icon is not None:
icon['path'] = self.icon
if self.icontype is not None:
icon['type'] = self.icontype
return icon
class Item(object):
"""Represents a feedback item for Alfred 3+.
Generates Alfred-compliant JSON for a single item.
Don't use this class directly (as it then won't be associated with
any :class:`Workflow <workflow.Workflow>` object), but rather use
:meth:`Workflow.add_item() <workflow.Workflow.add_item>`.
See :meth:`~workflow.Workflow.add_item` for details of arguments.
"""
def __init__(self, title, subtitle='', arg=None, autocomplete=None,
match=None, valid=False, uid=None, icon=None, icontype=None,
type=None, largetext=None, copytext=None, quicklookurl=None):
"""Create a new :class:`Item` object.
Use the same arguments as for
:meth:`Workflow.add_item() <workflow.Workflow.add_item>`.
Argument ``subtitle_modifiers`` is not supported.
"""
self.title = title
self.subtitle = subtitle
self.arg = arg
self.autocomplete = autocomplete
self.match = match
self.valid = valid
self.uid = uid
self.icon = icon
self.icontype = icontype
self.type = type
self.quicklookurl = quicklookurl
self.largetext = largetext
self.copytext = copytext
self.modifiers = {}
self.config = {}
self.variables = {}
def setvar(self, name, value):
"""Set a workflow variable for this Item.
Args:
name (str): Name of variable.
value (str): Value of variable.
"""
self.variables[name] = value
def getvar(self, name, default=None):
"""Return value of workflow variable for ``name`` or ``default``.
Args:
name (str): Variable name.
default (None, optional): Value to return if variable is unset.
Returns:
str or ``default``: Value of variable if set or ``default``.
"""
return self.variables.get(name, default)
def add_modifier(self, key, subtitle=None, arg=None, valid=None, icon=None,
icontype=None):
"""Add alternative values for a modifier key.
Args:
key (str): Modifier key, e.g. ``"cmd"`` or ``"alt"``
subtitle (str, optional): Override item subtitle.
arg (str, optional): Input for following action.
valid (bool, optional): Override item validity.
icon (str, optional): Filepath/UTI of icon.
icontype (str, optional): Type of icon. See
:meth:`Workflow.add_item() <workflow.Workflow.add_item>`
for valid values.
In Alfred 4.1+ and Alfred-PyWorkflow 1.40+, ``arg`` may also be a
:class:`list` or :class:`tuple`.
Returns:
Modifier: Configured :class:`Modifier`.
"""
mod = Modifier(key, subtitle, arg, valid, icon, icontype)
# Add Item variables to Modifier
mod.variables.update(self.variables)
self.modifiers[key] = mod
return mod
@property
def obj(self):
"""Item formatted for JSON serialization.
Returns:
dict: Data suitable for Alfred 3 feedback.
"""
# Required values
o = {
'title': self.title,
'subtitle': self.subtitle,
'valid': self.valid,
}
# Optional values
if self.arg is not None:
o['arg'] = self.arg
if self.autocomplete is not None:
o['autocomplete'] = self.autocomplete
if self.match is not None:
o['match'] = self.match
if self.uid is not None:
o['uid'] = self.uid
if self.type is not None:
o['type'] = self.type
if self.quicklookurl is not None:
o['quicklookurl'] = self.quicklookurl
if self.variables:
o['variables'] = self.variables
if self.config:
o['config'] = self.config
# Largetype and copytext
text = self._text()
if text:
o['text'] = text
icon = self._icon()
if icon:
o['icon'] = icon
# Modifiers
mods = self._modifiers()
if mods:
o['mods'] = mods
return o
def _icon(self):
"""Return `icon` object for item.
Returns:
dict: Mapping for item `icon` (may be empty).
"""
icon = {}
if self.icon is not None:
icon['path'] = self.icon
if self.icontype is not None:
icon['type'] = self.icontype
return icon
def _text(self):
"""Return `largetext` and `copytext` object for item.
Returns:
dict: `text` mapping (may be empty)
"""
text = {}
if self.largetext is not None:
text['largetype'] = self.largetext
if self.copytext is not None:
text['copy'] = self.copytext
return text
def _modifiers(self):
"""Build `mods` dictionary for JSON feedback.
Returns:
dict: Modifier mapping or `None`.
"""
if self.modifiers:
mods = {}
for k, mod in self.modifiers.items():
mods[k] = mod.obj
return mods
return None
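# --- Illustrative usage sketch (added; not part of the original module) ----
# Building a feedback item directly (normally done via Workflow.add_item())
# and inspecting the JSON-ready dict, including a cmd modifier:
def _example_item_obj():
    """Hypothetical helper demonstrating ``Item`` and ``Modifier``."""
    it = Item('Title', 'Subtitle', arg='value', valid=True)
    it.setvar('name', 'default')
    mod = it.add_modifier('cmd', subtitle='Alternate action')
    mod.setvar('name', 'alternate')
    return it.obj  # dict ready for json.dumps()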
class Settings(dict):
"""A dictionary that saves itself when changed.
Dictionary keys & values will be saved as a JSON file
at ``filepath``. If the file does not exist, the dictionary
(and settings file) will be initialised with ``defaults``.
:param filepath: where to save the settings
:type filepath: :class:`str`
:param defaults: dict of default settings
:type defaults: :class:`dict`
An appropriate instance is provided by :class:`Workflow` instances at
:attr:`Workflow.settings`.
"""
def __init__(self, filepath, defaults=None):
"""Create new :class:`Settings` object."""
super(Settings, self).__init__()
self._filepath = filepath
self._nosave = False
self._original = {}
if os.path.exists(self._filepath):
self._load()
elif defaults:
for key, val in defaults.items():
self[key] = val
self.save() # save default settings
def _load(self):
"""Load cached settings from JSON file `self._filepath`."""
data = {}
with LockFile(self._filepath, 0.5):
with open(self._filepath, 'rb') as fp:
data.update(json.load(fp))
self._original = deepcopy(data)
self._nosave = True
self.update(data)
self._nosave = False
@uninterruptible
def save(self):
"""Save settings to JSON file specified in ``self._filepath``.
If you're using this class via :attr:`Workflow.settings`, which
you probably are, ``self._filepath`` will be ``settings.json``
in your workflow's data directory (see :attr:`~Workflow.datadir`).
"""
if self._nosave:
return
data = {}
data.update(self)
with LockFile(self._filepath, 0.5):
with atomic_writer(self._filepath, 'w') as fp:
json.dump(data, fp, sort_keys=True, indent=2)
# dict methods
def __setitem__(self, key, value):
"""Implement :class:`dict` interface."""
if self._original.get(key) != value:
super(Settings, self).__setitem__(key, value)
self.save()
def __delitem__(self, key):
"""Implement :class:`dict` interface."""
super(Settings, self).__delitem__(key)
self.save()
def update(self, *args, **kwargs):
"""Override :class:`dict` method to save on update."""
super(Settings, self).update(*args, **kwargs)
self.save()
def setdefault(self, key, value=None):
"""Override :class:`dict` method to save on update."""
ret = super(Settings, self).setdefault(key, value)
self.save()
return ret
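# --- Illustrative usage sketch (added; not part of the original module) ----
# ``Settings`` persists every top-level mutation immediately, but mutating
# a nested object in place does not trigger a save (one of the documented
# limitations), so reassign the key instead. The path is an assumed example.
def _example_settings(path):
    """Hypothetical helper demonstrating ``Settings``."""
    s = Settings(path, defaults={'count': 0})
    s['count'] = s['count'] + 1   # top-level assignment saves automatically
    tags = s.setdefault('tags', [])
    s['tags'] = tags + ['new']    # reassign; an in-place append would not save
    return dict(s)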
class Workflow(object):
"""The ``Workflow`` object is the main interface to Alfred-PyWorkflow.
It provides APIs for accessing the Alfred/workflow environment,
storing & caching data, using Keychain, and generating Script
Filter feedback.
``Workflow`` is compatible with Alfred 3+.
:param default_settings: default workflow settings. If no settings file
exists, :class:`Workflow.settings` will be pre-populated with
``default_settings``.
:type default_settings: :class:`dict`
:param update_settings: settings for updating your workflow from
GitHub releases. The only required key is ``github_slug``,
whose value must take the form of ``username/repo``.
If specified, ``Workflow`` will check the repo's releases
for updates. Your workflow must also have a semantic version
number. Please see the :ref:`User Manual <user-manual>` and
`update API docs <api-updates>` for more information.
:type update_settings: :class:`dict`
:param input_encoding: encoding of command line arguments. You
should probably leave this as the default (``utf-8``), which
is the encoding Alfred uses.
:type input_encoding: :class:`str`
:param normalization: normalization to apply to CLI args.
See :meth:`Workflow.decode` for more details.
:type normalization: :class:`str`
:param capture_args: Capture and act on ``workflow:*`` arguments. See
:ref:`Magic arguments <magic-arguments>` for details.
:type capture_args: :class:`Boolean`
:param libraries: sequence of paths to directories containing
libraries. These paths will be prepended to ``sys.path``.
:type libraries: :class:`tuple` or :class:`list`
:param help_url: URL to webpage where a user can ask for help with
the workflow, report bugs, etc. This could be the GitHub repo
or a page on AlfredForum.com. If your workflow throws an error,
this URL will be displayed in the log and Alfred's debugger. It can
also be opened directly in a web browser with the ``workflow:help``
:ref:`magic argument <magic-arguments>`.
:type help_url: :class:`str`
Attributes:
item_class (class): Class used to generate feedback items.
variables (dict): Top level workflow variables.
"""
# Which class to use to generate feedback items. You probably
# won't want to change this
item_class = Item
def __init__(self, default_settings=None, update_settings=None,
input_encoding='utf-8', normalization='NFC',
capture_args=True, libraries=None,
help_url=None):
"""Create new :class:`Workflow` object."""
self._default_settings = default_settings or {}
self._update_settings = update_settings or {}
self._input_encoding = input_encoding
self._normalization = normalization
self._capture_args = capture_args
self.help_url = help_url
self._workflowdir = None
self._settings_path = None
self._settings = None
self._bundleid = None
self._debugging = None
self._name = None
self._cache_serializer = 'pickle'
self._data_serializer = 'pickle'
self._info = None
self._info_loaded = False
self._logger = None
self._items = []
self._alfred_env = None
self.variables = {}
self._rerun = 0
# Version number of the workflow
self._version = UNSET
# Version from last workflow run
self._last_version_run = UNSET
# Cache for regex patterns created for filter keys
self._search_pattern_cache = {}
#: Prefix for all magic arguments.
#: The default value is ``workflow:`` so keyword
#: ``config`` would match user query ``workflow:config``.
self.magic_prefix = 'workflow:'
#: Mapping of available magic arguments. The built-in magic
#: arguments are registered by default. To add your own magic arguments
#: (or override built-ins), add a key:value pair where the key is
#: what the user should enter (prefixed with :attr:`magic_prefix`)
#: and the value is a callable that will be called when the argument
#: is entered. If you would like to display a message in Alfred, the
#: function should return a ``str``.
#:
#: By default, the magic arguments documented
#: :ref:`here <magic-arguments>` are registered.
self.magic_arguments = {}
self._register_default_magic()
if libraries:
sys.path = libraries + sys.path
# Get session ID from environment if present
self._session_id = os.getenv('_WF_SESSION_ID') or None
if self._session_id:
self.setvar('_WF_SESSION_ID', self._session_id)
####################################################################
# API methods
####################################################################
# info.plist contents and alfred_* environment variables ----------
@property
def alfred_version(self):
"""Alfred version as :class:`~workflow.update.Version` object."""
from .update import Version
return Version(self.alfred_env.get('version'))
@property
def alfred_env(self):
"""Dict of Alfred's environmental variables minus ``alfred_`` prefix.
.. versionadded:: 1.7
The variables Alfred 2.4+ exports are:
============================ =========================================
Variable Description
============================ =========================================
debug Set to ``1`` if Alfred's debugger is
open, otherwise unset.
preferences Path to Alfred.alfredpreferences
(where your workflows and settings are
stored).
preferences_localhash Machine-specific preferences are stored
in ``Alfred.alfredpreferences/preferences/local/<hash>``
(see ``preferences`` above for
the path to ``Alfred.alfredpreferences``)
theme ID of selected theme
theme_background Background colour of selected theme in
format ``rgba(r,g,b,a)``
theme_subtext Show result subtext.
``0`` = Always,
``1`` = Alternative actions only,
``2`` = Selected result only,
``3`` = Never
version Alfred version number, e.g. ``'2.4'``
version_build Alfred build number, e.g. ``277``
workflow_bundleid Bundle ID, e.g.
``net.deanishe.alfred-mailto``
workflow_cache Path to workflow's cache directory
workflow_data Path to workflow's data directory
workflow_name Name of current workflow
workflow_uid UID of workflow
workflow_version The version number specified in the
workflow configuration sheet/info.plist
============================ =========================================
**Note:** all values are Unicode strings except ``debug``,
``version_build`` and ``theme_subtext``, which are integers.
:returns: ``dict`` of Alfred's environmental variables without the
``alfred_`` prefix, e.g. ``preferences``, ``workflow_data``.
"""
if self._alfred_env is not None:
return self._alfred_env
data = {}
for key in (
'debug',
'preferences',
'preferences_localhash',
'theme',
'theme_background',
'theme_subtext',
'version',
'version_build',
'workflow_bundleid',
'workflow_cache',
'workflow_data',
'workflow_name',
'workflow_uid',
'workflow_version'):
value = os.getenv('alfred_' + key, '')
if value:
if key in ('debug', 'version_build', 'theme_subtext'):
value = int(value)
else:
value = self.decode(value)
data[key] = value
self._alfred_env = data
return self._alfred_env
@property
def info(self):
""":class:`dict` of ``info.plist`` contents."""
if not self._info_loaded:
self._load_info_plist()
return self._info
@property
def bundleid(self):
"""Workflow bundle ID from environmental vars or ``info.plist``.
:returns: bundle ID
:rtype: ``str``
"""
if not self._bundleid:
if self.alfred_env.get('workflow_bundleid'):
self._bundleid = self.alfred_env.get('workflow_bundleid')
else:
self._bundleid = self.info['bundleid']
return self._bundleid
@property
def debugging(self):
"""Whether Alfred's debugger is open.
:returns: ``True`` if Alfred's debugger is open.
:rtype: ``bool``
"""
return self.alfred_env.get('debug') == 1
@property
def name(self):
"""Workflow name from Alfred's environmental vars or ``info.plist``.
:returns: workflow name
:rtype: ``str``
"""
if not self._name:
if self.alfred_env.get('workflow_name'):
self._name = self.decode(self.alfred_env.get('workflow_name'))
else:
self._name = self.decode(self.info['name'])
return self._name
@property
def version(self):
"""Return the version of the workflow.
.. versionadded:: 1.9.10
Get the workflow version from environment variable,
the ``update_settings`` dict passed on
instantiation, the ``version`` file located in the workflow's
root directory or ``info.plist``. Return ``None`` if none
exists, or raise :class:`ValueError` if the version number is
invalid (i.e. not semantic).
:returns: Version of the workflow (not Alfred-PyWorkflow)
:rtype: :class:`~workflow.update.Version` object
"""
if self._version is UNSET:
version = None
# environment variable has priority
if self.alfred_env.get('workflow_version'):
version = self.alfred_env['workflow_version']
# Try `update_settings`
elif self._update_settings:
version = self._update_settings.get('version')
# `version` file
if not version:
filepath = self.workflowfile('version')
if os.path.exists(filepath):
with open(filepath, 'r') as fileobj:
version = fileobj.read()
# info.plist
if not version:
version = self.info.get('version')
if version:
from .update import Version
version = Version(version)
self._version = version
return self._version
# Workflow utility methods -----------------------------------------
@property
def args(self):
"""Return command line args as normalised unicode.
Args are decoded and normalised via :meth:`~Workflow.decode`.
The encoding and normalization are the ``input_encoding`` and
``normalization`` arguments passed to :class:`Workflow` (``UTF-8``
and ``NFC`` are the defaults).
If :class:`Workflow` is called with ``capture_args=True``
(the default), :class:`Workflow` will look for certain
``workflow:*`` args and, if found, perform the corresponding
actions and exit the workflow.
See :ref:`Magic arguments <magic-arguments>` for details.
"""
msg = None
args = [self.decode(arg) for arg in sys.argv[1:]]
# Handle magic args
if len(args) and self._capture_args:
for name in self.magic_arguments:
key = '{0}{1}'.format(self.magic_prefix, name)
if key in args:
msg = self.magic_arguments[name]()
if msg:
self.logger.debug(msg)
if not sys.stdout.isatty(): # Show message in Alfred
self.add_item(msg, valid=False, icon=ICON_INFO)
self.send_feedback()
sys.exit(0)
return args
@property
def cachedir(self):
"""Path to workflow's cache directory.
The cache directory is a subdirectory of Alfred's own cache directory
in ``~/Library/Caches``. The full path in Alfred 4+ is:
``~/Library/Caches/com.runningwithcrayons.Alfred/Workflow Data/<bundle id>``
Returns:
str: full path to workflow's cache directory
"""
if self.alfred_env.get('workflow_cache'):
dirpath = self.alfred_env.get('workflow_cache')
else:
dirpath = self._default_cachedir
return self._create(dirpath)
@property
def _default_cachedir(self):
"""Alfred 4's default cache directory."""
return os.path.join(
os.path.expanduser(
'~/Library/Caches/com.runningwithcrayons.Alfred/'
'Workflow Data/'),
self.bundleid)
@property
def datadir(self):
"""Path to workflow's data directory.
The data directory is a subdirectory of Alfred's own data directory in
``~/Library/Application Support``. The full path for Alfred 4+ is:
``~/Library/Application Support/Alfred/Workflow Data/<bundle id>``
Returns:
str: full path to workflow data directory
"""
if self.alfred_env.get('workflow_data'):
dirpath = self.alfred_env.get('workflow_data')
else:
dirpath = self._default_datadir
return self._create(dirpath)
@property
def _default_datadir(self):
"""Alfred 4's default data directory."""
return os.path.join(os.path.expanduser(
'~/Library/Application Support/Alfred/Workflow Data/'),
self.bundleid)
@property
def workflowdir(self):
"""Path to workflow's root directory (where ``info.plist`` is).
Returns:
str: full path to workflow root directory
"""
if not self._workflowdir:
# Try the working directory first, then the directory
# the library is in. CWD will be the workflow root if
# a workflow is being run in Alfred
candidates = [
os.path.abspath(os.getcwd()),
os.path.dirname(os.path.abspath(os.path.dirname(__file__)))]
# climb the directory tree until we find `info.plist`
for dirpath in candidates:
# Ensure directory path is Unicode
dirpath = self.decode(dirpath)
while True:
if os.path.exists(os.path.join(dirpath, 'info.plist')):
self._workflowdir = dirpath
break
elif dirpath == '/':
# no `info.plist` found
break
# Check the parent directory
dirpath = os.path.dirname(dirpath)
# No need to check other candidates
if self._workflowdir:
break
if not self._workflowdir:
raise IOError("'info.plist' not found in directory tree")
return self._workflowdir
@property
def rerun(self):
"""How often (in seconds) Alfred should re-run the Script Filter."""
return self._rerun
@rerun.setter
def rerun(self, seconds):
"""Interval at which Alfred should re-run the Script Filter.
Args:
seconds (int): Interval between runs.
"""
self._rerun = seconds
@property
def session_id(self):
"""A unique session ID every time the user uses the workflow.
.. versionadded:: 1.25
The session ID persists while the user is using this workflow.
It expires when the user runs a different workflow or closes
Alfred.
"""
if not self._session_id:
from uuid import uuid4
self._session_id = uuid4().hex
self.setvar('_WF_SESSION_ID', self._session_id)
return self._session_id
def setvar(self, name, value, persist=False):
"""Set a "global" workflow variable.
.. versionchanged:: 1.33
These variables are always passed to downstream workflow objects.
If you have set :attr:`rerun`, these variables are also passed
back to the script when Alfred runs it again.
Args:
name (str): Name of variable.
value (str): Value of variable.
persist (bool, optional): Also save variable to ``info.plist``?
"""
self.variables[name] = value
if persist:
from .util import set_config
set_config(name, value, self.bundleid)
self.logger.debug('saved variable %r with value %r to info.plist',
name, value)
def getvar(self, name, default=None):
"""Return value of workflow variable for ``name`` or ``default``.
Args:
name (str): Variable name.
default (None, optional): Value to return if variable is unset.
Returns:
str or ``default``: Value of variable if set or ``default``.
"""
return self.variables.get(name, default)
def cachefile(self, filename):
"""Path to ``filename`` in workflow's cache directory.
Return absolute path to ``filename`` within your workflow's
:attr:`cache directory <Workflow.cachedir>`.
:param filename: basename of file
:type filename: ``str``
:returns: full path to file within cache directory
:rtype: ``str``
"""
if isinstance(filename, bytes):
filename = str(filename, 'utf8')
return os.path.join(self.cachedir, filename)
def datafile(self, filename):
"""Path to ``filename`` in workflow's data directory.
Return absolute path to ``filename`` within your workflow's
:attr:`data directory <Workflow.datadir>`.
:param filename: basename of file
:type filename: ``str``
:returns: full path to file within data directory
:rtype: ``str``
"""
if isinstance(filename, bytes):
filename = str(filename, 'utf8')
return os.path.join(self.datadir, filename)
def workflowfile(self, filename):
"""Return full path to ``filename`` in workflow's root directory.
:param filename: basename of file
:type filename: ``str``
:returns: full path to file within data directory
:rtype: ``str``
"""
if isinstance(filename, bytes):
filename = str(filename, 'utf8')
return os.path.join(self.workflowdir, filename)
@property
def logfile(self):
"""Path to logfile.
:returns: path to logfile within workflow's cache directory
:rtype: ``str``
"""
return self.cachefile('%s.log' % self.bundleid)
@property
def logger(self):
"""Logger that logs to both console and a log file.
If Alfred's debugger is open, log level will be ``DEBUG``,
else it will be ``INFO``.
Use :meth:`open_log` to open the log file in Console.
:returns: an initialised :class:`~logging.Logger`
"""
if self._logger:
return self._logger
# Initialise new logger and optionally handlers
logger = logging.getLogger('')
# Only add one set of handlers
# Exclude from coverage, as pytest will have configured the
# root logger already
if not len(logger.handlers): # pragma: no cover
fmt = logging.Formatter(
'%(asctime)s %(filename)s:%(lineno)s'
' %(levelname)-8s %(message)s',
datefmt='%H:%M:%S')
logfile = logging.handlers.RotatingFileHandler(
self.logfile,
maxBytes=1024 * 1024,
backupCount=1)
logfile.setFormatter(fmt)
logger.addHandler(logfile)
console = logging.StreamHandler()
console.setFormatter(fmt)
logger.addHandler(console)
if self.debugging:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
self._logger = logger
return self._logger
@logger.setter
def logger(self, logger):
"""Set a custom logger.
:param logger: The logger to use
:type logger: `~logging.Logger` instance
"""
self._logger = logger
@property
def settings_path(self):
"""Path to settings file within workflow's data directory.
:returns: path to ``settings.json`` file
:rtype: ``str``
"""
if not self._settings_path:
self._settings_path = self.datafile('settings.json')
return self._settings_path
@property
def settings(self):
"""Return a dictionary subclass that saves itself when changed.
See :ref:`guide-settings` in the :ref:`user-manual` for more
information on how to use :attr:`settings` and **important
limitations** on what it can do.
:returns: :class:`~workflow.workflow.Settings` instance
initialised from the data in JSON file at
:attr:`settings_path` or if that doesn't exist, with the
``default_settings`` :class:`dict` passed to
:class:`Workflow` on instantiation.
:rtype: :class:`~workflow.workflow.Settings` instance
"""
if not self._settings:
self.logger.debug('reading settings from %s', self.settings_path)
self._settings = Settings(self.settings_path,
self._default_settings)
return self._settings
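# Usage sketch (assumes a Workflow instance `wf`): `settings` acts like a
# normal dict but writes itself to `settings.json` when a top-level key is
# set or deleted. Mutating nested objects does not trigger a save, so
# reassign the key instead:
#
#     wf.settings['username'] = 'deanishe'   # saved to disk immediately
#     tags = wf.settings.get('tags', [])
#     wf.settings['tags'] = tags + ['new']   # reassignment forces a save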
@property
def cache_serializer(self):
"""Name of default cache serializer.
.. versionadded:: 1.8
This serializer is used by :meth:`cache_data()` and
:meth:`cached_data()`
See :class:`SerializerManager` for details.
:returns: serializer name
:rtype: ``str``
"""
return self._cache_serializer
@cache_serializer.setter
def cache_serializer(self, serializer_name):
"""Set the default cache serialization format.
.. versionadded:: 1.8
This serializer is used by :meth:`cache_data()` and
:meth:`cached_data()`
The specified serializer must already be registered with the
:class:`SerializerManager` at `~workflow.workflow.manager`,
otherwise a :class:`ValueError` will be raised.
:param serializer_name: Name of default serializer to use.
:type serializer_name: ``str``
"""
if manager.serializer(serializer_name) is None:
raise ValueError(
'Unknown serializer : `{0}`. Register your serializer '
'with `manager` first.'.format(serializer_name))
self.logger.debug('default cache serializer: %s', serializer_name)
self._cache_serializer = serializer_name
@property
def data_serializer(self):
"""Name of default data serializer.
.. versionadded:: 1.8
This serializer is used by :meth:`store_data()` and
:meth:`stored_data()`
See :class:`SerializerManager` for details.
:returns: serializer name
:rtype: ``str``
"""
return self._data_serializer
@data_serializer.setter
def data_serializer(self, serializer_name):
"""Set the default cache serialization format.
.. versionadded:: 1.8
This serializer is used by :meth:`store_data()` and
:meth:`stored_data()`
The specified serializer must already by registered with the
:class:`SerializerManager` at `~workflow.workflow.manager`,
otherwise a :class:`ValueError` will be raised.
:param serializer_name: Name of serializer to use by default.
"""
if manager.serializer(serializer_name) is None:
raise ValueError(
'Unknown serializer : `{0}`. Register your serializer '
'with `manager` first.'.format(serializer_name))
self.logger.debug('default data serializer: %s', serializer_name)
self._data_serializer = serializer_name
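# Sketch of plugging in a custom serializer (`MyJSON` is hypothetical;
# `manager.register()` is the API referenced in the docstrings above).
# A serializer only needs `load(file_obj)` and `dump(obj, file_obj)`:
#
#     import json
#     from workflow.workflow import manager
#
#     class MyJSON:
#         @classmethod
#         def load(cls, file_obj):
#             return json.load(file_obj)
#         @classmethod
#         def dump(cls, obj, file_obj):
#             json.dump(obj, file_obj, indent=2)
#
#     manager.register('myjson', MyJSON)
#     wf.data_serializer = 'myjson'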
@property
def _session_prefix(self):
"""Filename prefix for current session."""
return '_wfsess-{0}-'.format(self.session_id)
def _mk_session_name(self, name):
"""New cache name/key based on session ID."""
return self._session_prefix + name
def stored_data(self, name):
"""Retrieve data from data directory.
Returns ``None`` if there are no data stored under ``name``.
.. versionadded:: 1.8
:param name: name of datastore
"""
metadata_path = self.datafile('.{0}.alfred-workflow'.format(name))
if not os.path.exists(metadata_path):
self.logger.debug('no data stored for `%s`', name)
return None
with open(metadata_path, 'r') as file_obj:
serializer_name = file_obj.read().strip()
serializer = manager.serializer(serializer_name)
if serializer is None:
raise ValueError(
'Unknown serializer `{0}`. Register a corresponding '
'serializer with `manager.register()` '
'to load this data.'.format(serializer_name))
self.logger.debug('data `%s` stored as `%s`', name, serializer_name)
filename = '{0}.{1}'.format(name, serializer_name)
data_path = self.datafile(filename)
if not os.path.exists(data_path):
self.logger.debug('no data stored: %s', name)
if os.path.exists(metadata_path):
os.unlink(metadata_path)
return None
with open(data_path, 'rb') as file_obj:
data = serializer.load(file_obj)
self.logger.debug('stored data loaded: %s', data_path)
return data
def store_data(self, name, data, serializer=None):
"""Save data to data directory.
.. versionadded:: 1.8
If ``data`` is ``None``, the datastore will be deleted.
Note that the datastore does NOT support multiple threads.
:param name: name of datastore
:param data: object(s) to store. **Note:** some serializers
can only handle certain types of data.
:param serializer: name of serializer to use. If no serializer
is specified, the default will be used. See
:class:`SerializerManager` for more information.
:returns: ``None``
"""
# Ensure deletion is not interrupted by SIGTERM
@uninterruptible
def delete_paths(paths):
"""Clear one or more data stores"""
for path in paths:
if os.path.exists(path):
os.unlink(path)
self.logger.debug('deleted data file: %s', path)
serializer_name = serializer or self.data_serializer
# In order for `stored_data()` to be able to load data stored with
# an arbitrary serializer, yet still have meaningful file extensions,
# the format (i.e. extension) is saved to an accompanying file
metadata_path = self.datafile('.{0}.alfred-workflow'.format(name))
filename = '{0}.{1}'.format(name, serializer_name)
data_path = self.datafile(filename)
if data_path == self.settings_path:
raise ValueError(
'Cannot save data to ' +
'`{0}` with format `{1}`. '.format(name, serializer_name) +
"This would overwrite Alfred-PyWorkflow's settings file.")
serializer = manager.serializer(serializer_name)
if serializer is None:
raise ValueError(
'Invalid serializer `{0}`. Register your serializer with '
'`manager.register()` first.'.format(serializer_name))
if data is None: # Delete cached data
delete_paths((metadata_path, data_path))
return
# Ensure write is not interrupted by SIGTERM
@uninterruptible
def _store():
# Save file extension
with atomic_writer(metadata_path, 'w') as file_obj:
file_obj.write(serializer_name)
with atomic_writer(data_path, 'wb') as file_obj:
serializer.dump(data, file_obj)
_store()
self.logger.debug('saved data: %s', data_path)
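# Round-trip sketch (assumes a Workflow instance `wf`): stored data
# persists across runs in the data directory; pass ``None`` to delete it.
#
#     wf.store_data('bookmarks', [{'title': 'Example',
#                                  'url': 'https://example.com'}])
#     bookmarks = wf.stored_data('bookmarks')   # -> the saved list
#     wf.store_data('bookmarks', None)          # deletes the datastore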
def cached_data(self, name, data_func=None, max_age=60, session=False):
"""Cache API with session-scoped expiry.
.. versionadded:: 1.25
Return cached data if younger than ``max_age`` seconds.
Retrieve data from cache or re-generate and re-cache data if
stale/non-existent. If ``max_age`` is 0, return cached data no
matter how old.
:param name: Name of datastore
:type name: ``str``
:param data_func: Callable that returns fresh data. It
is called if the cache has expired or doesn't exist.
:type data_func: ``callable``
:param max_age: Maximum allowable age of cached data in seconds.
:type max_age: ``int``
:param session: Whether to scope the cache to the current session (optional).
:type session: ``bool``
:returns: Cached data, return value of ``data_func`` or ``None``
if ``data_func`` is not set.
"""
if session:
name = self._mk_session_name(name)
serializer = manager.serializer(self.cache_serializer)
cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
age = self.cached_data_age(name)
if (age < max_age or max_age == 0) and os.path.exists(cache_path):
with open(cache_path, 'rb') as file_obj:
self.logger.debug('loading cached data: %s', cache_path)
return serializer.load(file_obj)
if not data_func:
return None
data = data_func()
self.cache_data(name, data)
return data
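# Typical caching pattern (sketch; `fetch_posts` is a hypothetical
# callable using :mod:`workflow.web`): return cached data younger than
# 10 minutes, otherwise call `fetch_posts()` and cache its result.
#
#     def fetch_posts():
#         return web.get('https://example.com/posts.json').json()
#
#     posts = wf.cached_data('posts', fetch_posts, max_age=600)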
def clear_session_cache(self, current=False):
"""Remove session data from the cache.
.. versionadded:: 1.25
.. versionchanged:: 1.27
By default, data belonging to the current session won't be
deleted. Set ``current=True`` to also clear current session.
Args:
current (bool, optional): If ``True``, also remove data for
current session.
"""
def _is_session_file(filename):
if current:
return filename.startswith('_wfsess-')
return filename.startswith('_wfsess-') \
and not filename.startswith(self._session_prefix)
self.clear_cache(_is_session_file)
def cache_data(self, name, data, session=False):
"""Cache API with session-scoped expiry.
.. versionadded:: 1.25
Save ``data`` to cache under ``name``. If ``data`` is
``None``, the corresponding cache file will be deleted.
:param name: name of datastore
:type name: ``str``
:param data: Data to store. This may be any object supported by
the cache serializer
:type data: ``object``
:param session: Whether to scope the cache to the
current session (optional).
:type session: ``bool``
If ``session`` is ``True``, then ``name`` is prefixed
with :attr:`session_id`.
"""
if session:
name = self._mk_session_name(name)
serializer = manager.serializer(self.cache_serializer)
cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
if data is None:
if os.path.exists(cache_path):
os.unlink(cache_path)
self.logger.debug('deleted cache file: %s', cache_path)
return
with atomic_writer(cache_path, 'wb') as file_obj:
serializer.dump(data, file_obj)
self.logger.debug('cached data: %s', cache_path)
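# Session-scoped caching sketch: with ``session=True`` the cache name is
# prefixed with :attr:`session_id`, so the data effectively expires when
# the user switches workflows or closes Alfred.
#
#     wf.cache_data('results', results, session=True)
#     cached = wf.cached_data('results', session=True, max_age=0)
#     wf.clear_session_cache()   # drop data left over from past sessions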
@property
def obj(self):
"""Feedback formatted for JSON serialization.
Returns:
dict: Data suitable for Alfred 3 feedback.
"""
items = []
for item in self._items:
items.append(item.obj)
o = {'items': items}
if self.variables:
o['variables'] = self.variables
if self.rerun:
o['rerun'] = self.rerun
return o
def warn_empty(self, title, subtitle='', icon=None):
"""Add a warning to feedback if there are no items.
.. versionadded:: 1.31
Add a "warning" item to Alfred feedback if no other items
have been added. This is a handy shortcut to prevent Alfred
from showing its fallback searches, which it does if no
items are returned.
Args:
title (str): Title of feedback item.
subtitle (str, optional): Subtitle of feedback item.
icon (str, optional): Icon for feedback item. If not
specified, ``ICON_WARNING`` is used.
Returns:
Item: Newly-created item.
"""
if len(self._items):
return
icon = icon or ICON_WARNING
return self.add_item(title, subtitle, icon=icon)
def cached_data_fresh(self, name, max_age):
"""Whether cache `name` is less than `max_age` seconds old.
:param name: name of datastore
:param max_age: maximum age of data in seconds
:type max_age: ``int``
:returns: ``True`` if data is less than ``max_age`` old, else
``False``
"""
age = self.cached_data_age(name)
if not age:
return False
return age < max_age
def cached_data_age(self, name):
"""Return age in seconds of cache `name` or 0 if cache doesn't exist.
:param name: name of datastore
:type name: ``str``
:returns: age of datastore in seconds
:rtype: ``float``
"""
cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
if not os.path.exists(cache_path):
return 0
return time.time() - os.stat(cache_path).st_mtime
def filter(self, query, items, key=lambda x: x, ascending=False,
include_score=False, min_score=0, max_results=0,
match_on=MATCH_ALL, fold_diacritics=True):
"""Fuzzy search filter. Returns list of ``items`` that match ``query``.
``query`` is case-insensitive. Any item that does not contain the
entirety of ``query`` is rejected.
If ``query`` is an empty string or contains only whitespace,
all items will match.
:param query: query to test items against
:type query: ``str``
:param items: iterable of items to test
:type items: ``list`` or ``tuple``
:param key: function to get comparison key from ``items``.
Must return a ``str``. The default simply returns
the item.
:type key: ``callable``
:param ascending: set to ``True`` to get worst matches first
:type ascending: ``Boolean``
:param include_score: Useful for debugging the scoring algorithm.
If ``True``, results will be a list of tuples
``(item, score, rule)``.
:type include_score: ``Boolean``
:param min_score: If non-zero, ignore results with a score lower
than this.
:type min_score: ``int``
:param max_results: If non-zero, prune results list to this length.
:type max_results: ``int``
:param match_on: Filter option flags. Bitwise-combined list of
``MATCH_*`` constants (see below).
:type match_on: ``int``
:param fold_diacritics: Convert search keys to ASCII-only
characters if ``query`` only contains ASCII characters.
:type fold_diacritics: ``Boolean``
:returns: list of ``items`` matching ``query`` or list of
``(item, score, rule)`` `tuples` if ``include_score`` is ``True``.
``rule`` is the ``MATCH_*`` rule that matched the item.
:rtype: ``list``
**Matching rules**
By default, :meth:`filter` uses all of the following flags (i.e.
:const:`MATCH_ALL`). The tests are always run in the given order:
1. :const:`MATCH_STARTSWITH`
Item search key starts with ``query`` (case-insensitive).
2. :const:`MATCH_CAPITALS`
The list of capital letters in item search key starts with
``query`` (``query`` may be lower-case). E.g., ``of``
would match ``OmniFocus``, ``gc`` would match ``Google Chrome``.
3. :const:`MATCH_ATOM`
Search key is split into "atoms" on non-word characters
(.,-,' etc.). Matches if ``query`` is one of these atoms
(case-insensitive).
4. :const:`MATCH_INITIALS_STARTSWITH`
Initials are the first characters of the above-described
"atoms" (case-insensitive).
5. :const:`MATCH_INITIALS_CONTAIN`
``query`` is a substring of the above-described initials.
6. :const:`MATCH_INITIALS`
Combination of (4) and (5).
7. :const:`MATCH_SUBSTRING`
``query`` is a substring of item search key (case-insensitive).
8. :const:`MATCH_ALLCHARS`
All characters in ``query`` appear in item search key in
the same order (case-insensitive).
9. :const:`MATCH_ALL`
Combination of all the above.
:const:`MATCH_ALLCHARS` is considerably slower than the other
tests and provides much less accurate results.
**Examples:**
To ignore :const:`MATCH_ALLCHARS` (tends to provide the worst
matches and is expensive to run), use
``match_on=MATCH_ALL ^ MATCH_ALLCHARS``.
To match only on capitals, use ``match_on=MATCH_CAPITALS``.
To match only on startswith and substring, use
``match_on=MATCH_STARTSWITH | MATCH_SUBSTRING``.
**Diacritic folding**
.. versionadded:: 1.3
If ``fold_diacritics`` is ``True`` (the default), and ``query``
contains only ASCII characters, non-ASCII characters in search keys
will be converted to ASCII equivalents (e.g. **ü** -> **u**,
**ß** -> **ss**, **é** -> **e**).
See :const:`ASCII_REPLACEMENTS` for all replacements.
If ``query`` contains non-ASCII characters, search keys will not be
altered.
"""
if not query:
return items
# Remove preceding/trailing spaces
query = query.strip()
if not query:
return items
# Use user override if there is one
fold_diacritics = self.settings.get('__workflow_diacritic_folding',
fold_diacritics)
results = []
for item in items:
skip = False
score = 0
words = [s.strip() for s in query.split(' ')]
value = key(item).strip()
if value == '':
continue
for word in words:
if word == '':
continue
s, rule = self._filter_item(value, word, match_on,
fold_diacritics)
if not s: # Skip items that don't match part of the query
skip = True
score += s
if skip:
continue
if score:
# use "reversed" `score` (i.e. highest becomes lowest) and
# `value` as sort key. This means items with the same score
# will be sorted in alphabetical not reverse alphabetical order
results.append(((100.0 / score, value.lower(), score),
(item, score, rule)))
# sort on keys, then discard the keys
results.sort(reverse=ascending)
results = [t[1] for t in results]
if min_score:
results = [r for r in results if r[1] > min_score]
if max_results and len(results) > max_results:
results = results[:max_results]
# return list of ``(item, score, rule)``
if include_score:
return results
# just return list of items
return [t[0] for t in results]
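# Filtering sketch (assumes `wf`, a `query` string and a list of dicts
# `books`): `key` extracts the string to match; `match_on` flags restrict
# the rules applied.
#
#     matches = wf.filter(query, books,
#                         key=lambda b: b['title'],
#                         match_on=MATCH_ALL ^ MATCH_ALLCHARS,
#                         min_score=20, max_results=10)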
def _filter_item(self, value, query, match_on, fold_diacritics):
"""Filter ``value`` against ``query`` using rules ``match_on``.
:returns: ``(score, rule)``
"""
query = query.lower()
if not isascii(query):
fold_diacritics = False
if fold_diacritics:
value = self.fold_to_ascii(value)
# pre-filter any items that do not contain all characters
# of ``query`` to save on running several more expensive tests
if not set(query) <= set(value.lower()):
return (0, None)
# item starts with query
if match_on & MATCH_STARTSWITH and value.lower().startswith(query):
score = 100.0 - (len(value) / len(query))
return (score, MATCH_STARTSWITH)
# query matches capitalised letters in item,
# e.g. of = OmniFocus
if match_on & MATCH_CAPITALS:
initials = ''.join([c for c in value if c in INITIALS])
if initials.lower().startswith(query):
score = 100.0 - (len(initials) / len(query))
return (score, MATCH_CAPITALS)
# split the item into "atoms", i.e. words separated by
# spaces or other non-word characters
if (match_on & MATCH_ATOM or
match_on & MATCH_INITIALS_CONTAIN or
match_on & MATCH_INITIALS_STARTSWITH):
atoms = [s.lower() for s in split_on_delimiters(value)]
# print('atoms : %s --> %s' % (value, atoms))
# initials of the atoms
initials = ''.join([s[0] for s in atoms if s])
if match_on & MATCH_ATOM:
# is `query` one of the atoms in item?
# similar to substring, but scores more highly, as it's
# a word within the item
if query in atoms:
score = 100.0 - (len(value) / len(query))
return (score, MATCH_ATOM)
# `query` matches start (or all) of the initials of the
# atoms, e.g. ``himym`` matches "How I Met Your Mother"
# *and* "how i met your mother" (the ``capitals`` rule only
# matches the former)
if (match_on & MATCH_INITIALS_STARTSWITH and
initials.startswith(query)):
score = 100.0 - (len(initials) / len(query))
return (score, MATCH_INITIALS_STARTSWITH)
# `query` is a substring of initials, e.g. ``doh`` matches
# "The Dukes of Hazzard"
elif (match_on & MATCH_INITIALS_CONTAIN and
query in initials):
score = 95.0 - (len(initials) / len(query))
return (score, MATCH_INITIALS_CONTAIN)
# `query` is a substring of item
if match_on & MATCH_SUBSTRING and query in value.lower():
score = 90.0 - (len(value) / len(query))
return (score, MATCH_SUBSTRING)
# finally, assign a score based on how close together the
# characters in `query` are in item.
if match_on & MATCH_ALLCHARS:
search = self._search_for_query(query)
match = search(value)
if match:
score = 100.0 / ((1 + match.start()) *
(match.end() - match.start() + 1))
return (score, MATCH_ALLCHARS)
# Nothing matched
return (0, None)
def _search_for_query(self, query):
if query in self._search_pattern_cache:
return self._search_pattern_cache[query]
# Build pattern: include all characters
pattern = []
for c in query:
# pattern.append('[^{0}]*{0}'.format(re.escape(c)))
pattern.append('.*?{0}'.format(re.escape(c)))
pattern = ''.join(pattern)
search = re.compile(pattern, re.IGNORECASE).search
self._search_pattern_cache[query] = search
return search
def run(self, func, text_errors=False):
"""Call ``func`` to run your workflow.
:param func: Callable to call with ``self`` (i.e. the :class:`Workflow`
instance) as first argument.
:param text_errors: Emit error messages in plain text, not in
Alfred's XML/JSON feedback format. Use this when you're not
running Alfred-PyWorkflow in a Script Filter and would like
to pass the error message to, say, a notification.
:type text_errors: ``Boolean``
``func`` will be called with :class:`Workflow` instance as first
argument.
``func`` should be the main entry point to your workflow.
Any exceptions raised will be logged and an error message will be
output to Alfred.
"""
start = time.time()
# Write to debugger to ensure "real" output starts on a new line
print('.', file=sys.stderr)
# Call workflow's entry function/method within a try-except block
# to catch any errors and display an error message in Alfred
try:
if self.version:
self.logger.debug('---------- %s (%s) ----------',
self.name, self.version)
else:
self.logger.debug('---------- %s ----------', self.name)
# Run update check if configured for self-updates.
# This call has to go in the `run` try-except block, as it will
# initialise `self.settings`, which will raise an exception
# if `settings.json` isn't valid.
if self._update_settings:
self.check_update()
# Run workflow's entry function/method
func(self)
# Set last version run to current version after a successful
# run
self.set_last_version()
except Exception as err:
self.logger.exception(err)
if self.help_url:
self.logger.info('for assistance, see: %s', self.help_url)
if not sys.stdout.isatty(): # Show error in Alfred
if text_errors:
print(str(err), end='')
else:
self._items = []
if self._name:
name = self._name
elif self._bundleid: # pragma: no cover
name = self._bundleid
else: # pragma: no cover
name = os.path.dirname(__file__)
self.add_item(f"Error in workflow '{name}'",
str(err),
icon=ICON_ERROR)
self.send_feedback()
return 1
finally:
self.logger.debug('---------- finished in %0.3fs ----------',
time.time() - start)
return 0
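# Canonical entry-point sketch for a Script Filter (assumes the package
# is importable as `workflow`): `run()` wraps `main` so that exceptions
# are logged and shown in Alfred, and its return value is the exit code.
#
#     import sys
#     from workflow import Workflow
#
#     def main(wf):
#         wf.add_item('Hello', 'World')
#         wf.send_feedback()
#
#     if __name__ == '__main__':
#         sys.exit(Workflow().run(main))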
# Alfred feedback methods ------------------------------------------
def add_item(self, title, subtitle='', arg=None, autocomplete=None,
valid=False, uid=None, icon=None, icontype=None, type=None,
largetext=None, copytext=None, quicklookurl=None, match=None):
"""Add an item to be output to Alfred.
:param title: Title shown in Alfred
:type title: ``str``
:param subtitle: Subtitle shown in Alfred
:type subtitle: ``str``
:param arg: Argument passed by Alfred as ``{query}`` when item is
actioned
:type arg: ``str``, ``list`` or ``tuple``
:param autocomplete: Text expanded in Alfred when item is TABbed
:type autocomplete: ``str``
:param valid: Whether or not item can be actioned
:type valid: ``Boolean``
:param uid: Used by Alfred to remember/sort items
:type uid: ``str``
:param icon: Filename of icon to use
:type icon: ``str``
:param icontype: Type of icon. Must be one of ``None``, ``'filetype'``
or ``'fileicon'``. Use ``'filetype'`` when ``icon`` is a filetype
such as ``'public.folder'``. Use ``'fileicon'`` when you wish to
use the icon of the file specified as ``icon``, e.g.
``icon='/Applications/Safari.app', icontype='fileicon'``.
Leave as ``None`` if ``icon`` points to an actual
icon file.
:type icontype: ``str``
:param type: Result type. Currently only ``'file'`` is supported
(by Alfred). This will tell Alfred to enable file actions for
this item.
:type type: ``str``
:param largetext: Text to be displayed in Alfred's large text box
if user presses CMD+L on item.
:type largetext: ``str``
:param copytext: Text to be copied to pasteboard if user presses
CMD+C on item.
:type copytext: ``str``
:param quicklookurl: URL to be displayed using Alfred's Quick Look
feature (tapping ``SHIFT`` or ``⌘+Y`` on a result).
:type quicklookurl: ``str``
:param match: If you have "Alfred filters results" turned on for
your Script Filter, Alfred (version 3.5 and above)
will filter against this field, not ``title`` (optional).
:type match: ``str``
:returns: Alfred feedback :class:`Item` instance
See :ref:`icons` for a list of the supported system icons.
See :meth:`Workflow.add_item() <workflow.Workflow.add_item>` for
the main documentation and other parameters.
.. note::
Although this method returns an :class:`Item` instance, you don't
need to hold onto it or worry about it. All generated :class:`Item`
instances are also collected internally and sent to Alfred when
:meth:`send_feedback` is called.
The generated :class:`Item` is only returned in case you want to
edit it or do something with it other than send it to Alfred. E.g.
use the :meth:`~Item.add_modifier()` method to show subtitles
when a modifier (CMD, OPT, etc.) is pressed.
"""
item = self.item_class(title, subtitle, arg, autocomplete,
match, valid, uid, icon, icontype, type,
largetext, copytext, quicklookurl)
# Add variables to child item
item.variables.update(self.variables)
self._items.append(item)
return item
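# Sketch: the returned :class:`Item` can be customised before feedback is
# sent, e.g. an alternate subtitle/arg while CMD is held (URL illustrative):
#
#     it = wf.add_item('Open docs', 'Open documentation in browser',
#                      arg='https://example.com/docs', valid=True)
#     it.add_modifier('cmd', subtitle='Copy URL instead', arg='copy')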
def send_feedback(self):
"""Print stored items to console/Alfred as JSON."""
if self.debugging:
json.dump(self.obj, sys.stdout, indent=2, separators=(',', ': '))
else:
json.dump(self.obj, sys.stdout)
sys.stdout.flush()
####################################################################
# Updating methods
####################################################################
@property
def first_run(self):
"""Return ``True`` if it's the first time this version has run.
.. versionadded:: 1.9.10
Raises a :class:`ValueError` if :attr:`version` isn't set.
"""
if not self.version:
raise ValueError('No workflow version set')
if not self.last_version_run:
return True
return self.version != self.last_version_run
@property
def last_version_run(self):
"""Return version of last version to run (or ``None``).
.. versionadded:: 1.9.10
:returns: :class:`~workflow.update.Version` instance
or ``None``
"""
if self._last_version_run is UNSET:
version = self.settings.get('__workflow_last_version')
if version:
from .update import Version
version = Version(version)
self._last_version_run = version
self.logger.debug('last run version: %s', self._last_version_run)
return self._last_version_run
def set_last_version(self, version=None):
"""Set :attr:`last_version_run` to current version.
.. versionadded:: 1.9.10
:param version: version to store (default is current version)
:type version: :class:`~workflow.update.Version` instance
or ``str``
:returns: ``True`` if version is saved, else ``False``
"""
if not version:
if not self.version:
self.logger.warning(
"Can't save last version: workflow has no version")
return False
version = self.version
if isinstance(version, str):
from .update import Version
version = Version(version)
self.settings['__workflow_last_version'] = str(version)
self.logger.debug('set last run version: %s', version)
return True
@property
def update_available(self):
"""Whether an update is available.
.. versionadded:: 1.9
See :ref:`guide-updates` in the :ref:`user-manual` for detailed
information on how to enable your workflow to update itself.
:returns: ``True`` if an update is available, else ``False``
"""
key = '__workflow_latest_version'
# Create a new workflow object to ensure standard serialiser
# is used (update.py is called without the user's settings)
status = Workflow().cached_data(key, max_age=0)
# self.logger.debug('update status: %r', status)
if not status or not status.get('available'):
return False
return status['available']
@property
def prereleases(self):
"""Whether workflow should update to pre-release versions.
.. versionadded:: 1.16
:returns: ``True`` if pre-releases are enabled with the :ref:`magic
argument <magic-arguments>` or the ``update_settings`` dict, else
``False``.
"""
if self._update_settings.get('prereleases'):
return True
return self.settings.get('__workflow_prereleases') or False
def check_update(self, force=False):
"""Call update script if it's time to check for a new release.
.. versionadded:: 1.9
The update script will be run in the background, so it won't
interfere in the execution of your workflow.
See :ref:`guide-updates` in the :ref:`user-manual` for detailed
information on how to enable your workflow to update itself.
:param force: Force update check
:type force: ``Boolean``
"""
key = '__workflow_latest_version'
frequency = self._update_settings.get('frequency',
DEFAULT_UPDATE_FREQUENCY)
if not force and not self.settings.get('__workflow_autoupdate', True):
self.logger.debug('Auto update turned off by user')
return
# Check for new version if it's time
if (force or not self.cached_data_fresh(key, frequency * 86400)):
repo = self._update_settings['github_slug']
# version = self._update_settings['version']
version = str(self.version)
from .background import run_in_background
cmd = ['/usr/bin/env', 'python3', '-m', 'workflow.update', 'check', repo, version]
if self.prereleases:
cmd.append('--prereleases')
self.logger.info('checking for update ...')
run_in_background('__workflow_update_check', cmd)
else:
self.logger.debug('update check not due')
def start_update(self):
"""Check for update and download and install new workflow file.
.. versionadded:: 1.9
See :ref:`guide-updates` in the :ref:`user-manual` for detailed
information on how to enable your workflow to update itself.
:returns: ``True`` if an update is available and will be
installed, else ``False``
"""
from . import update
repo = self._update_settings['github_slug']
# version = self._update_settings['version']
version = str(self.version)
if not update.check_update(repo, version, self.prereleases):
return False
from .background import run_in_background
cmd = ['/usr/bin/env', 'python3', '-m', 'workflow.update', 'install', repo, version]
if self.prereleases:
cmd.append('--prereleases')
self.logger.debug('downloading update ...')
run_in_background('__workflow_update_install', cmd)
return True
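# Self-update configuration sketch (slug and frequency are illustrative):
# pass ``update_settings`` to the constructor, then surface the cached
# check result to the user.
#
#     wf = Workflow(update_settings={
#         'github_slug': 'username/reponame',   # hypothetical repo
#         'frequency': 7,                       # check every 7 days
#     })
#     if wf.update_available:
#         wf.add_item('A newer version is available',
#                     'Action this item to install the update',
#                     autocomplete='workflow:update', icon=ICON_INFO)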
####################################################################
# Keychain password storage methods
####################################################################
def save_password(self, account, password, service=None):
"""Save account credentials.
If the account exists, the old password will first be deleted
(Keychain throws an error otherwise).
If something goes wrong, a :class:`KeychainError` exception will
be raised.
:param account: name of the account the password is for, e.g.
"Pinboard"
:type account: ``str``
:param password: the password to secure
:type password: ``str``
:param service: Name of the service. By default, this is the
workflow's bundle ID
:type service: ``str``
"""
if not service:
service = self.bundleid
try:
self._call_security('add-generic-password', service, account,
'-w', password)
self.logger.debug('saved password : %s:%s', service, account)
except PasswordExists:
self.logger.debug('password exists : %s:%s', service, account)
current_password = self.get_password(account, service)
if current_password == password:
self.logger.debug('password unchanged')
else:
self.delete_password(account, service)
self._call_security('add-generic-password', service,
account, '-w', password)
self.logger.debug('save_password : %s:%s', service, account)
def get_password(self, account, service=None):
"""Retrieve the password saved at ``service/account``.
Raise :class:`PasswordNotFound` exception if password doesn't exist.
:param account: name of the account the password is for, e.g.
"Pinboard"
:type account: ``str``
:param service: Name of the service. By default, this is the workflow's
bundle ID
:type service: ``str``
:returns: account password
:rtype: ``str``
"""
if not service:
service = self.bundleid
output = self._call_security('find-generic-password', service,
account, '-g')
# Parsing of `security` output is adapted from python-keyring
# by Jason R. Coombs
# https://pypi.python.org/pypi/keyring
m = re.search(
r'password:\s*(?:0x(?P<hex>[0-9A-F]+)\s*)?(?:"(?P<pw>.*)")?',
output)
if m:
groups = m.groupdict()
h = groups.get('hex')
password = groups.get('pw')
if h:
password = str(binascii.unhexlify(h), 'utf-8')
self.logger.debug('got password : %s:%s', service, account)
return password
def delete_password(self, account, service=None):
"""Delete the password stored at ``service/account``.
Raise :class:`PasswordNotFound` if account is unknown.
:param account: name of the account the password is for, e.g.
"Pinboard"
:type account: ``str``
:param service: Name of the service. By default, this is the workflow's
bundle ID
:type service: ``str``
"""
if not service:
service = self.bundleid
self._call_security('delete-generic-password', service, account)
self.logger.debug('deleted password : %s:%s', service, account)
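# Keychain sketch: credentials are stored in the macOS Keychain under the
# workflow's bundle ID by default; `PasswordNotFound` is raised for
# unknown accounts.
#
#     wf.save_password('pinboard', 'hunter2')
#     try:
#         secret = wf.get_password('pinboard')
#     except PasswordNotFound:
#         secret = None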
####################################################################
# Methods for workflow:* magic args
####################################################################
def _register_default_magic(self):
"""Register the built-in magic arguments."""
# TODO: refactor & simplify
# Wrap callback and message with callable
def callback(func, msg):
def wrapper():
func()
return msg
return wrapper
self.magic_arguments['delcache'] = callback(self.clear_cache,
'Deleted workflow cache')
self.magic_arguments['deldata'] = callback(self.clear_data,
'Deleted workflow data')
self.magic_arguments['delsettings'] = callback(
self.clear_settings, 'Deleted workflow settings')
self.magic_arguments['reset'] = callback(self.reset,
'Reset workflow')
self.magic_arguments['openlog'] = callback(self.open_log,
'Opening workflow log file')
self.magic_arguments['opencache'] = callback(
self.open_cachedir, 'Opening workflow cache directory')
self.magic_arguments['opendata'] = callback(
self.open_datadir, 'Opening workflow data directory')
self.magic_arguments['openworkflow'] = callback(
self.open_workflowdir, 'Opening workflow directory')
self.magic_arguments['openterm'] = callback(
self.open_terminal, 'Opening workflow root directory in Terminal')
# Diacritic folding
def fold_on():
self.settings['__workflow_diacritic_folding'] = True
return 'Diacritics will always be folded'
def fold_off():
self.settings['__workflow_diacritic_folding'] = False
return 'Diacritics will never be folded'
def fold_default():
if '__workflow_diacritic_folding' in self.settings:
del self.settings['__workflow_diacritic_folding']
return 'Diacritics folding reset'
self.magic_arguments['foldingon'] = fold_on
self.magic_arguments['foldingoff'] = fold_off
self.magic_arguments['foldingdefault'] = fold_default
# Updates
def update_on():
self.settings['__workflow_autoupdate'] = True
return 'Auto update turned on'
def update_off():
self.settings['__workflow_autoupdate'] = False
return 'Auto update turned off'
def prereleases_on():
self.settings['__workflow_prereleases'] = True
return 'Prerelease updates turned on'
def prereleases_off():
self.settings['__workflow_prereleases'] = False
return 'Prerelease updates turned off'
def do_update():
if self.start_update():
return 'Downloading and installing update ...'
else:
return 'No update available'
self.magic_arguments['autoupdate'] = update_on
self.magic_arguments['noautoupdate'] = update_off
self.magic_arguments['prereleases'] = prereleases_on
self.magic_arguments['noprereleases'] = prereleases_off
self.magic_arguments['update'] = do_update
# Help
def do_help():
if self.help_url:
self.open_help()
return 'Opening workflow help URL in browser'
else:
return 'Workflow has no help URL'
def show_version():
if self.version:
return 'Version: {0}'.format(self.version)
else:
return 'This workflow has no version number'
def list_magic():
"""Display all available magic args in Alfred."""
isatty = sys.stderr.isatty()
for name in sorted(self.magic_arguments.keys()):
if name == 'magic':
continue
arg = self.magic_prefix + name
self.logger.debug(arg)
if not isatty:
self.add_item(arg, icon=ICON_INFO)
if not isatty:
self.send_feedback()
self.magic_arguments['help'] = do_help
self.magic_arguments['magic'] = list_magic
self.magic_arguments['version'] = show_version
def clear_cache(self, filter_func=lambda f: True):
"""Delete all files in workflow's :attr:`cachedir`.
:param filter_func: Callable to determine whether a file should be
deleted or not. ``filter_func`` is called with the filename
of each file in the data directory. If it returns ``True``,
the file will be deleted.
By default, *all* files will be deleted.
:type filter_func: ``callable``
"""
self._delete_directory_contents(self.cachedir, filter_func)
def clear_data(self, filter_func=lambda f: True):
"""Delete all files in workflow's :attr:`datadir`.
:param filter_func: Callable to determine whether a file should be
deleted or not. ``filter_func`` is called with the filename
of each file in the data directory. If it returns ``True``,
the file will be deleted.
By default, *all* files will be deleted.
:type filter_func: ``callable``
"""
self._delete_directory_contents(self.datadir, filter_func)
def clear_settings(self):
"""Delete workflow's :attr:`settings_path`."""
if os.path.exists(self.settings_path):
os.unlink(self.settings_path)
self.logger.debug('deleted : %r', self.settings_path)
def reset(self):
"""Delete workflow settings, cache and data.
File :attr:`settings <settings_path>` and directories
:attr:`cache <cachedir>` and :attr:`data <datadir>` are deleted.
"""
self.clear_cache()
self.clear_data()
self.clear_settings()
def open_log(self):
"""Open :attr:`logfile` in default app (usually Console.app)."""
subprocess.call(['open', self.logfile]) # nosec
def open_cachedir(self):
"""Open the workflow's :attr:`cachedir` in Finder."""
subprocess.call(['open', self.cachedir]) # nosec
def open_datadir(self):
"""Open the workflow's :attr:`datadir` in Finder."""
subprocess.call(['open', self.datadir]) # nosec
def open_workflowdir(self):
"""Open the workflow's :attr:`workflowdir` in Finder."""
subprocess.call(['open', self.workflowdir]) # nosec
def open_terminal(self):
"""Open a Terminal window at workflow's :attr:`workflowdir`."""
subprocess.call(['open', '-a', 'Terminal', self.workflowdir]) # nosec
def open_help(self):
"""Open :attr:`help_url` in default browser."""
subprocess.call(['open', self.help_url]) # nosec
return 'Opening workflow help URL in browser'
####################################################################
# Helper methods
####################################################################
def decode(self, text, encoding=None, normalization=None):
"""Return ``text`` as normalised unicode.
If ``encoding`` and/or ``normalization`` is ``None``, the
``input_encoding`` and ``normalization`` parameters passed to
:class:`Workflow` are used.
:param text: string
:type text: encoded or Unicode string. If ``text`` is already a
Unicode string, it will only be normalised.
:param encoding: The text encoding to use to decode ``text`` to
Unicode.
:type encoding: ``str`` or ``None``
:param normalization: The normalisation form to apply to ``text``.
:type normalization: ``str`` or ``None``
:returns: decoded and normalised ``str``
:class:`Workflow` uses "NFC" normalisation by default. This is the
standard for Python and will work well with data from the web (via
:mod:`~workflow.web` or :mod:`json`).
macOS, on the other hand, uses "NFD" normalisation (nearly), so data
coming from the system (e.g. via :mod:`subprocess` or
:func:`os.listdir`/:mod:`os.path`) may not match. You should either
normalise this data, too, or change the default normalisation used by
:class:`Workflow`.
"""
encoding = encoding or self._input_encoding
normalization = normalization or self._normalization
if not isinstance(text, str):
text = str(text, encoding)
return unicodedata.normalize(normalization, text)
def fold_to_ascii(self, text):
"""Convert non-ASCII characters to closest ASCII equivalent.
.. versionadded:: 1.3
.. note:: This only works for a subset of European languages.
:param text: text to convert
:type text: ``str``
:returns: text containing only ASCII characters
:rtype: ``str``
"""
if isascii(text):
return text
text = ''.join([ASCII_REPLACEMENTS.get(c, c) for c in text])
return unicodedata.normalize('NFKD', text)
def dumbify_punctuation(self, text):
"""Convert non-ASCII punctuation to closest ASCII equivalent.
This method replaces "smart" quotes and n- or m-dashes with their
workaday ASCII equivalents. This method is currently not used
internally, but exists as a helper method for workflow authors.
.. versionadded:: 1.9.7
:param text: text to convert
:type text: ``str``
:returns: text with only ASCII punctuation
:rtype: ``str``
"""
if isascii(text):
return text
text = ''.join([DUMB_PUNCTUATION.get(c, c) for c in text])
return text
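# Text-helper sketch: `decode()` unifies encoding and normal form, while
# the fold/dumbify helpers coerce text towards plain ASCII (outputs are
# indicative):
#
#     s = wf.decode(subprocess.check_output(['ls']))   # NFC-normalised str
#     wf.fold_to_ascii('Jürgen Müßig')          # -> e.g. 'Jurgen Mussig'
#     wf.dumbify_punctuation('“smart” – text')  # -> e.g. '"smart" - text'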
def _delete_directory_contents(self, dirpath, filter_func):
"""Delete all files in a directory.
:param dirpath: path to directory to clear
:type dirpath: ``str``
:param filter_func: function to determine whether a file shall be
deleted or not.
:type filter_func: ``callable``
"""
if os.path.exists(dirpath):
for filename in os.listdir(dirpath):
if not filter_func(filename):
continue
path = os.path.join(dirpath, filename)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.unlink(path)
self.logger.debug('deleted : %r', path)
def _load_info_plist(self):
"""Load workflow info from ``info.plist``."""
# info.plist should be in the directory above this one
with open(self.workflowfile('info.plist'), 'rb') as fp:
self._info = plistlib.load(fp)
self._info_loaded = True
def _create(self, dirpath):
"""Create directory `dirpath` if it doesn't exist.
:param dirpath: path to directory
:type dirpath: ``str``
:returns: ``dirpath`` argument
:rtype: ``str``
"""
if not os.path.exists(dirpath):
os.makedirs(dirpath)
return dirpath
def _call_security(self, action, service, account, *args):
"""Call ``security`` CLI program that provides access to keychains.
May raise `PasswordNotFound`, `PasswordExists` or `KeychainError`
exceptions (the first two are subclasses of `KeychainError`).
:param action: The ``security`` action to call, e.g.
``add-generic-password``
:type action: ``str``
:param service: Name of the service.
:type service: ``str``
:param account: name of the account the password is for, e.g.
"Pinboard"
:type account: ``str``
:param *args: list of command line arguments to be passed to
``security``
:type *args: `list` or `tuple`
:returns: output of the ``security`` command
:rtype: ``str``
"""
cmd = ['security', action, '-s', service, '-a', account] + list(args)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, _ = p.communicate()
if p.returncode == 44: # password does not exist
raise PasswordNotFound()
elif p.returncode == 45: # password already exists
raise PasswordExists()
elif p.returncode > 0:
err = KeychainError('Unknown Keychain error : %s' % stdout.decode('utf-8'))
err.retcode = p.returncode
raise err
return stdout.strip().decode('utf-8')
|
Alfred-PyWorkflow
|
/Alfred-PyWorkflow-2.0.0b0.tar.gz/Alfred-PyWorkflow-2.0.0b0/workflow/workflow.py
|
workflow.py
|
import codecs
import json
import mimetypes
import os
import random
import re
import socket
import string
import unicodedata
import urllib.error
import urllib.parse
import urllib.request
import zlib
with open(os.path.join(os.path.dirname(__file__), 'version')) as _fp:
    __version__ = _fp.read()
USER_AGENT = ('Alfred-PyWorkflow/' + __version__ +
' (+https://xdevcloud.de/alfred-pyworkflow)')
# Valid characters for multipart form data boundaries
BOUNDARY_CHARS = string.digits + string.ascii_letters
# HTTP response codes
RESPONSES = {
100: 'Continue',
101: 'Switching Protocols',
200: 'OK',
201: 'Created',
202: 'Accepted',
203: 'Non-Authoritative Information',
204: 'No Content',
205: 'Reset Content',
206: 'Partial Content',
300: 'Multiple Choices',
301: 'Moved Permanently',
302: 'Found',
303: 'See Other',
304: 'Not Modified',
305: 'Use Proxy',
307: 'Temporary Redirect',
400: 'Bad Request',
401: 'Unauthorized',
402: 'Payment Required',
403: 'Forbidden',
404: 'Not Found',
405: 'Method Not Allowed',
406: 'Not Acceptable',
407: 'Proxy Authentication Required',
408: 'Request Timeout',
409: 'Conflict',
410: 'Gone',
411: 'Length Required',
412: 'Precondition Failed',
413: 'Request Entity Too Large',
414: 'Request-URI Too Long',
415: 'Unsupported Media Type',
416: 'Requested Range Not Satisfiable',
417: 'Expectation Failed',
500: 'Internal Server Error',
501: 'Not Implemented',
502: 'Bad Gateway',
503: 'Service Unavailable',
504: 'Gateway Timeout',
505: 'HTTP Version Not Supported'
}
def byte_dict(dic):
"""Convert keys and values in ``dic`` into UTF-8-encoded :class:`bytes`.
:param dic: Mapping of Unicode strings
:type dic: dict
:returns: Dictionary containing only UTF-8 encoded bytes
:rtype: dict
"""
if isinstance(dic, CaseInsensitiveDictionary):
dic2 = CaseInsensitiveDictionary()
else:
dic2 = {}
for k, v in dic.items():
if isinstance(k, str):
k = k.encode('utf-8')
if isinstance(v, str):
v = v.encode('utf-8')
dic2[k] = v
return dic2
class NoRedirectHandler(urllib.request.HTTPRedirectHandler):
"""Prevent redirections."""
def redirect_request(self, *args):
"""Ignore redirect."""
return None
# Adapted from https://gist.github.com/babakness/3901174
class CaseInsensitiveDictionary(dict):
"""Dictionary with caseless key search.
Enables case-insensitive searching while preserving case sensitivity
when keys are listed, i.e., via keys() or items() methods.
Works by storing a lowercase version of the key as the new key and
stores the original key-value pair as the key's value
(values become dictionaries).
"""
def __init__(self, initval=None):
"""Create new case-insensitive dictionary."""
if isinstance(initval, dict):
for key, value in initval.items():
self.__setitem__(key, value)
elif isinstance(initval, list):
for (key, value) in initval:
self.__setitem__(key, value)
def __contains__(self, key):
return dict.__contains__(self, key.lower())
def __getitem__(self, key):
return dict.__getitem__(self, key.lower())['val']
def __setitem__(self, key, value):
return dict.__setitem__(self, key.lower(), {'key': key, 'val': value})
def get(self, key, default=None):
"""Return value for case-insensitive key or default."""
try:
v = dict.__getitem__(self, key.lower())
except KeyError:
return default
else:
return v['val']
def update(self, other):
"""Update values from other ``dict``."""
for k, v in other.items():
self[k] = v
def items(self):
"""Iterate over ``(key, value)`` pairs."""
for v in dict.values(self):
yield v['key'], v['val']
def keys(self):
"""Iterate over original keys."""
for v in dict.values(self):
yield v['key']
def values(self):
"""Interate over values."""
for v in dict.values(self):
yield v['val']
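# Behaviour sketch: lookups ignore case, while `keys()`/`items()` preserve
# the casing that was originally inserted.
#
#     h = CaseInsensitiveDictionary({'Content-Type': 'text/html'})
#     h['content-type']    # -> 'text/html'
#     list(h.keys())       # -> ['Content-Type']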
class Request(urllib.request.Request):
"""Subclass of :class:`urllib.request.Request` that supports custom methods."""
def __init__(self, *args, **kwargs):
"""Create a new :class:`Request`."""
self._method = kwargs.pop('method', None)
urllib.request.Request.__init__(self, *args, **kwargs)
def get_method(self):
    if self._method:
        return self._method.upper()
    return urllib.request.Request.get_method(self)
class Response(object):
"""
Returned by :func:`request` / :func:`get` / :func:`post` functions.
Simplified version of the ``Response`` object in the ``requests`` library.
>>> r = request('http://www.google.com')
>>> r.status_code
200
>>> r.encoding
'ISO-8859-1'
>>> r.content # bytes
<html> ...
>>> r.text # str, decoded according to charset in HTTP header/meta tag
'<html> ...'
>>> r.json() # content parsed as JSON
"""
def __init__(self, request, stream=False):
"""Call `request` with :mod:`urllib.request` and process results.
:param request: :class:`Request` instance
:param stream: Whether to stream response or retrieve it all at once
:type stream: bool
"""
self.request = request
self._stream = stream
self.url = None
self.raw = None
self._encoding = None
self.error = None
self.status_code = None
self.reason = None
self.headers = CaseInsensitiveDictionary()
self._content = None
self._content_loaded = False
self._gzipped = False
# Execute query
try:
self.raw = urllib.request.urlopen(request)
except urllib.error.HTTPError as err:
self.error = err
try:
self.url = err.geturl()
# sometimes (e.g. when authentication fails)
# urllib can't get a URL from an HTTPError
# This behaviour changes across Python versions,
# so no test cover (it isn't important).
except AttributeError: # pragma: no cover
pass
self.status_code = err.code
else:
self.status_code = self.raw.getcode()
self.url = self.raw.geturl()
self.reason = RESPONSES.get(self.status_code)
# Parse additional info if request succeeded
if not self.error:
headers = self.raw.info()
self.transfer_encoding = headers.get('transfer-encoding', '')
self.mimetype = headers.get_content_type()
for key in headers.keys():
self.headers[key.lower()] = headers.get(key)
# Is content gzipped?
# Transfer-Encoding appears to not be used in the wild
# (contrary to the HTTP standard), but no harm in testing
# for it
if 'gzip' in headers.get('content-encoding', '') or \
'gzip' in headers.get('transfer-encoding', ''):
self._gzipped = True
@property
def stream(self):
"""Whether response is streamed.
Returns:
bool: `True` if response is streamed.
"""
return self._stream
@stream.setter
def stream(self, value):
if self._content_loaded:
raise RuntimeError("`content` has already been read from "
"this Response.")
self._stream = value
def json(self):
"""Decode response contents as JSON.
:returns: object decoded from JSON
:rtype: list, dict or str
"""
return json.loads(self.content)
@property
def encoding(self):
"""Text encoding of document or ``None``.
:returns: Text encoding if found.
:rtype: str or ``None``
"""
if not self._encoding:
self._encoding = self._get_encoding()
return self._encoding
@property
def content(self):
"""Raw content of response (i.e. bytes).
:returns: Body of HTTP response
:rtype: bytes
"""
if not self._content:
# Decompress gzipped content
if self._gzipped:
decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
self._content = decoder.decompress(self.raw.read())
else:
self._content = self.raw.read()
self._content_loaded = True
return self._content
@property
def text(self):
"""Unicode-decoded content of response body.
If no encoding can be determined from HTTP headers or the content
itself, the encoded response body will be returned instead.
:returns: Body of HTTP response
:rtype: str or bytes
"""
if self.encoding:
return unicodedata.normalize(
'NFC', str(self.content, self.encoding)
)
return self.content
def iter_content(self, chunk_size=4096, decode_unicode=False):
"""Iterate over response data.
.. versionadded:: 1.6
:param chunk_size: Number of bytes to read into memory
:type chunk_size: int
:param decode_unicode: Decode to Unicode using detected encoding
:type decode_unicode: bool
:returns: iterator
"""
if not self.stream:
raise RuntimeError("You cannot call `iter_content` on a "
"Response unless you passed `stream=True`"
" to `get()`/`post()`/`request()`.")
if self._content_loaded:
raise RuntimeError(
"`content` has already been read from this Response.")
def decode_stream(iterator, r):
dec = codecs.getincrementaldecoder(r.encoding)(errors='replace')
for chunk in iterator:
data = dec.decode(chunk)
if data:
yield data
data = dec.decode(b'', final=True)
if data: # pragma: no cover
yield data
def generate():
if self._gzipped:
decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
if self._gzipped:
chunk = decoder.decompress(chunk)
yield chunk
chunks = generate()
if decode_unicode and self.encoding:
chunks = decode_stream(chunks, self)
return chunks
def save_to_path(self, filepath):
"""Save retrieved data to file at ``filepath``.
.. versionadded:: 1.9.6
:param filepath: Path to save retrieved data.
"""
filepath = os.path.abspath(filepath)
dirname = os.path.dirname(filepath)
if not os.path.exists(dirname):
os.makedirs(dirname)
self.stream = True
with open(filepath, 'wb') as fileobj:
for data in self.iter_content():
fileobj.write(data)
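# Streaming download sketch (URL illustrative): pass ``stream=True`` so
# the body is read in chunks instead of being loaded into memory at once.
#
#     r = request('GET', 'https://example.com/big.zip', stream=True)
#     r.raise_for_status()
#     r.save_to_path('/tmp/big.zip')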
def raise_for_status(self):
"""Raise stored error if one occurred.
error will be instance of :class:`urllib.error.HTTPError`
"""
if self.error is not None:
raise self.error
return
def _get_encoding(self):
"""Get encoding from HTTP headers or content.
:returns: encoding or `None`
:rtype: str or ``None``
"""
headers = self.raw.info()
encoding = None
if headers.get_content_charset():
encoding = headers.get_content_charset()
# HTTP Content-Type header
for param, value in (headers.get_params() or []):
if param.startswith('charset'):
encoding = value
break
if not self.stream: # Try sniffing response content
# Encoding declared in document should override HTTP headers
if self.mimetype == 'text/html': # sniff HTML headers
m = re.search(br"""<meta.+charset=["']{0,1}(.+?)["'].*>""",
self.content)
if m:
encoding = m.group(1).decode('utf8')
elif ((self.mimetype.startswith('application/')
or self.mimetype.startswith('text/'))
and 'xml' in self.mimetype):
m = re.search(br"""<?xml.+encoding=["'](.+?)["'][^>]*\?>""",
self.content)
if m:
encoding = m.group(1).decode('utf8')
# Format defaults
if self.mimetype == 'application/json' and not encoding:
# The default encoding for JSON
encoding = 'utf-8'
elif self.mimetype == 'application/xml' and not encoding:
# The default for 'application/xml'
encoding = 'utf-8'
if encoding:
encoding = encoding.lower()
return encoding
def request(method, url, params=None, data=None, headers=None, cookies=None,
files=None, auth=None, timeout=60, allow_redirects=False,
stream=False):
"""Initiate an HTTP(S) request. Returns :class:`Response` object.
:param method: 'GET' or 'POST'
:type method: str
:param url: URL to open
:type url: str
:param params: mapping of URL parameters
:type params: dict
:param data: mapping of form data ``{'field_name': 'value'}`` or
:class:`str`
:type data: dict or str
:param headers: HTTP headers
:type headers: dict
:param cookies: cookies to send to server
:type cookies: dict
:param files: files to upload (see below).
:type files: dict
:param auth: username, password
:type auth: tuple
:param timeout: connection timeout limit in seconds
:type timeout: int
:param allow_redirects: follow redirections
:type allow_redirects: bool
:param stream: Stream content instead of fetching it all at once.
:type stream: bool
:returns: Response object
:rtype: :class:`Response`
The ``files`` argument is a dictionary::
{'fieldname' : { 'filename': 'blah.txt',
'content': '<binary data>',
'mimetype': 'text/plain'}
}
* ``fieldname`` is the name of the field in the HTML form.
* ``mimetype`` is optional. If not provided, :mod:`mimetypes` will
be used to guess the mimetype, or ``application/octet-stream``
will be used.
"""
# TODO: cookies
socket.setdefaulttimeout(timeout)
# Default handlers
openers = [urllib.request.ProxyHandler(urllib.request.getproxies())]
if not allow_redirects:
openers.append(NoRedirectHandler())
if auth is not None: # Add authorisation handler
username, password = auth
password_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(None, url, username, password)
auth_manager = urllib.request.HTTPBasicAuthHandler(password_manager)
openers.append(auth_manager)
# Install our custom chain of openers
opener = urllib.request.build_opener(*openers)
urllib.request.install_opener(opener)
if not headers:
headers = CaseInsensitiveDictionary()
else:
headers = CaseInsensitiveDictionary(headers)
if 'user-agent' not in headers:
headers['user-agent'] = USER_AGENT
# Accept gzip-encoded content
encodings = [s.strip() for s in
headers.get('accept-encoding', '').split(',')]
if 'gzip' not in encodings:
encodings.append('gzip')
headers['accept-encoding'] = ', '.join(encodings)
if files:
if not data:
data = {}
new_headers, data = encode_multipart_formdata(data, files)
headers.update(new_headers)
elif data and isinstance(data, dict):
data = urllib.parse.urlencode(byte_dict(data))
# Make sure everything is encoded as bytes
headers = byte_dict(headers)
if isinstance(data, str):
data = data.encode('utf-8')
if params: # GET args (POST args are handled in encode_multipart_formdata)
scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
if query: # Combine query string and `params`
url_params = urllib.parse.parse_qs(query)
# `params` take precedence over URL query string
url_params.update(params)
params = url_params
query = urllib.parse.urlencode(byte_dict(params), doseq=True)
url = urllib.parse.urlunsplit((scheme, netloc, path, query, fragment))
req = Request(url, data, headers, method=method)
return Response(req, stream)
def get(url, params=None, headers=None, cookies=None, auth=None,
timeout=60, allow_redirects=True, stream=False):
"""Initiate a GET request. Arguments as for :func:`request`.
:returns: :class:`Response` instance
"""
return request('GET', url, params, headers=headers, cookies=cookies,
auth=auth, timeout=timeout, allow_redirects=allow_redirects,
stream=stream)
def delete(url, params=None, data=None, headers=None, cookies=None, auth=None,
timeout=60, allow_redirects=True, stream=False):
"""Initiate a DELETE request. Arguments as for :func:`request`.
:returns: :class:`Response` instance
"""
return request('DELETE', url, params, data, headers=headers,
cookies=cookies, auth=auth, timeout=timeout,
allow_redirects=allow_redirects, stream=stream)
def post(url, params=None, data=None, headers=None, cookies=None, files=None,
auth=None, timeout=60, allow_redirects=False, stream=False):
"""Initiate a POST request. Arguments as for :func:`request`.
:returns: :class:`Response` instance
"""
return request('POST', url, params, data, headers, cookies, files, auth,
timeout, allow_redirects, stream)
def put(url, params=None, data=None, headers=None, cookies=None, files=None,
auth=None, timeout=60, allow_redirects=False, stream=False):
"""Initiate a PUT request. Arguments as for :func:`request`.
:returns: :class:`Response` instance
"""
return request('PUT', url, params, data, headers, cookies, files, auth,
timeout, allow_redirects, stream)
def encode_multipart_formdata(fields, files):
"""Encode form data (``fields``) and ``files`` for POST request.
:param fields: mapping of ``{name : value}`` pairs for normal form fields.
:type fields: dict
:param files: dictionary of fieldnames/files elements for file data.
See below for details.
:type files: dict of :class:`dict`
:returns: ``(headers, body)`` ``headers`` is a
:class:`dict` of HTTP headers
:rtype: 2-tuple ``(dict, str)``
The ``files`` argument is a dictionary::
{'fieldname' : { 'filename': 'blah.txt',
'content': '<binary data>',
'mimetype': 'text/plain'}
}
- ``fieldname`` is the name of the field in the HTML form.
- ``mimetype`` is optional. If not provided, :mod:`mimetypes` will
be used to guess the mimetype, or ``application/octet-stream``
will be used.
"""
def get_content_type(filename):
"""Return or guess mimetype of ``filename``.
:param filename: filename of file
:type filename: str
:returns: mime-type, e.g. ``text/html``
:rtype: str
"""
return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
boundary = b'-----' + b''.join(
random.choice(BOUNDARY_CHARS).encode('ascii') for i in range(30)
)
CRLF = b'\r\n'
output = []
# Normal form fields
for (name, value) in fields.items():
if isinstance(name, str):
name = name.encode('utf-8')
if isinstance(value, str):
value = value.encode('utf-8')
output.append(b'--' + boundary)
output.append(b'Content-Disposition: form-data; name="%b"' % name)
output.append(b'')
output.append(value)
# Files to upload
for name, d in files.items():
filename = d['filename']
content = d['content']
if 'mimetype' in d:
mimetype = d['mimetype']
else:
mimetype = get_content_type(filename)
if isinstance(name, str):
name = name.encode('utf-8')
if isinstance(filename, str):
filename = filename.encode('utf-8')
if isinstance(mimetype, str):
mimetype = mimetype.encode('utf-8')
if isinstance(content, str):
content = content.encode('utf-8')
output.append(b'--' + boundary)
output.append(b'Content-Disposition: form-data; '
b'name="%b"; filename="%b"' % (name, filename))
output.append(b'Content-Type: %b' % mimetype)
output.append(b'')
output.append(content)
output.append(b'--' + boundary + b'--')
output.append(b'')
body = CRLF.join(output)
headers = {
'Content-Type': 'multipart/form-data; boundary=%s' % boundary.decode('ascii'),
}
return (headers, body)
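# Illustrative sketch (not part of the API surface): encoding a form with
# one file field. Field names and file content below are made up.
#
# headers, body = encode_multipart_formdata(
#     {'title': 'My upload'},
#     {'attachment': {'filename': 'notes.txt',
#                     'content': b'hello world',
#                     'mimetype': 'text/plain'}})
# headers['Content-Type'] starts with 'multipart/form-data; boundary=...'
# and body is the CRLF-joined bytes payload for a POST request.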
|
Alfred-PyWorkflow
|
/Alfred-PyWorkflow-2.0.0b0.tar.gz/Alfred-PyWorkflow-2.0.0b0/workflow/web.py
|
web.py
|
from collections import defaultdict
from functools import total_ordering
import json
import os
import tempfile
import re
import subprocess
from . import workflow
from . import web
# __all__ = []
RELEASES_BASE = 'https://api.github.com/repos/{}/releases'
match_workflow = re.compile(r'\.alfred(\d+)?workflow$').search
_wf = None
def wf():
"""Lazy `Workflow` object."""
global _wf
if _wf is None:
_wf = workflow.Workflow()
return _wf
@total_ordering
class Download(object):
"""A workflow file that is available for download.
.. versionadded:: 1.37
Attributes:
url (str): URL of workflow file.
filename (str): Filename of workflow file.
version (Version): Semantic version of workflow.
prerelease (bool): Whether version is a pre-release.
alfred_version (Version): Minimum compatible version
of Alfred.
"""
@classmethod
def from_dict(cls, d):
"""Create a `Download` from a `dict`."""
return cls(url=d['url'], filename=d['filename'],
version=Version(d['version']),
prerelease=d['prerelease'])
@classmethod
def from_releases(cls, js):
"""Extract downloads from GitHub releases.
Searches releases with semantic tags for assets with
file extension .alfredworkflow or .alfredXworkflow where
X is a number.
Files are returned sorted by latest version first. Any
releases containing multiple files with the same (workflow)
extension are rejected as ambiguous.
Args:
js (str): JSON response from GitHub's releases endpoint.
Returns:
list: Sequence of `Download`.
"""
releases = json.loads(js)
downloads = []
for release in releases:
tag = release['tag_name']
dupes = defaultdict(int)
try:
version = Version(tag)
except ValueError as err:
wf().logger.debug('ignored release: bad version "%s": %s',
tag, err)
continue
dls = []
for asset in release.get('assets', []):
url = asset.get('browser_download_url')
filename = os.path.basename(url)
m = match_workflow(filename)
if not m:
wf().logger.debug('unwanted file: %s', filename)
continue
ext = m.group(0)
dupes[ext] = dupes[ext] + 1
dls.append(Download(url, filename, version,
release['prerelease']))
valid = True
for ext, n in dupes.items():
if n > 1:
wf().logger.debug('ignored release "%s": multiple assets '
'with extension "%s"', tag, ext)
valid = False
break
if valid:
downloads.extend(dls)
downloads.sort(reverse=True)
return downloads
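# Illustrative sketch: the minimal shape of GitHub releases JSON that
# `from_releases` reads (values are made up; keys match the code above).
#
# js = json.dumps([{
#     'tag_name': 'v1.2.0',
#     'prerelease': False,
#     'assets': [{'browser_download_url':
#                 'https://example.com/My.alfredworkflow'}],
# }])
# Download.from_releases(js)
# -> [Download('https://example.com/My.alfredworkflow',
#              'My.alfredworkflow', Version('1.2.0'), False)]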
def __init__(self, url, filename, version, prerelease=False):
"""Create a new Download.
Args:
url (str): URL of workflow file.
filename (str): Filename of workflow file.
version (Version): Version of workflow.
prerelease (bool, optional): Whether version is
pre-release. Defaults to False.
"""
if isinstance(version, str):
version = Version(version)
self.url = url
self.filename = filename
self.version = version
self.prerelease = prerelease
@property
def alfred_version(self):
"""Minimum Alfred version based on filename extension."""
m = match_workflow(self.filename)
if not m or not m.group(1):
return Version('0')
return Version(m.group(1))
@property
def dict(self):
"""Convert `Download` to `dict`."""
return dict(url=self.url, filename=self.filename,
version=str(self.version), prerelease=self.prerelease)
def __str__(self):
"""Format `Download` for printing."""
return ('Download(url={dl.url!r}, '
'filename={dl.filename!r}, '
'version={dl.version!r}, '
'prerelease={dl.prerelease!r})'.format(dl=self))
def __repr__(self):
"""Code-like representation of `Download`."""
return str(self)
def __eq__(self, other):
"""Compare Downloads based on version numbers."""
if self.url != other.url \
or self.filename != other.filename \
or self.version != other.version \
or self.prerelease != other.prerelease:
return False
return True
def __ne__(self, other):
"""Compare Downloads based on version numbers."""
return not self.__eq__(other)
def __lt__(self, other):
"""Compare Downloads based on version numbers."""
if self.version != other.version:
return self.version < other.version
return self.alfred_version < other.alfred_version
class Version(object):
"""Mostly semantic versioning.
The main difference to proper :ref:`semantic versioning <semver>`
is that this implementation doesn't require a minor or patch version.
Version strings may also be prefixed with "v", e.g.:
>>> v = Version('v1.1.1')
>>> v.tuple
(1, 1, 1, '')
>>> v = Version('2.0')
>>> v.tuple
(2, 0, 0, '')
>>> Version('3.1-beta').tuple
(3, 1, 0, 'beta')
>>> Version('1.0.1') > Version('0.0.1')
True
"""
#: Match version and pre-release/build information in version strings
match_version = re.compile(r'([0-9][0-9\.]*)(.+)?').match
def __init__(self, vstr):
"""Create new `Version` object.
Args:
vstr (``str``): Semantic version string.
"""
if not vstr:
raise ValueError('invalid version number: {!r}'.format(vstr))
self.vstr = vstr
self.major = 0
self.minor = 0
self.patch = 0
self.suffix = ''
self.build = ''
self._parse(vstr)
def _parse(self, vstr):
if vstr.startswith('v'):
m = self.match_version(vstr[1:])
else:
m = self.match_version(vstr)
if not m:
raise ValueError('invalid version number: ' + vstr)
version, suffix = m.groups()
parts = self._parse_dotted_string(version)
self.major = parts.pop(0)
if len(parts):
self.minor = parts.pop(0)
if len(parts):
self.patch = parts.pop(0)
if parts:
raise ValueError('version number too long: ' + vstr)
if suffix:
# Build info
idx = suffix.find('+')
if idx > -1:
self.build = suffix[idx+1:]
suffix = suffix[:idx]
if suffix:
if not suffix.startswith('-'):
raise ValueError(
'suffix must start with - : ' + suffix)
self.suffix = suffix[1:]
def _parse_dotted_string(self, s):
"""Parse string ``s`` into list of ints and strings."""
parsed = []
parts = s.split('.')
for p in parts:
if p.isdigit():
p = int(p)
parsed.append(p)
return parsed
@property
def tuple(self):
"""Version number as a tuple of major, minor, patch, pre-release."""
return (self.major, self.minor, self.patch, self.suffix)
def __lt__(self, other):
"""Implement comparison.
.. versionadded:: 1.x
.. versionchanged:: 2.0
"""
if not isinstance(other, Version):
raise ValueError('not a Version instance: {0!r}'.format(other))
t = self.tuple[:3]
o = other.tuple[:3]
if t < o:
return True
if t == o: # We need to compare suffixes
if self.suffix and not other.suffix:
return True
if other.suffix and not self.suffix:
return False
lft = self._parse_dotted_string(self.suffix)
rgt = self._parse_dotted_string(other.suffix)
try:
return lft < rgt
except TypeError:
# Python 3 will not allow lt/gt comparisons of int & str.
while lft and rgt and lft[0] == rgt[0]:
lft.pop(0)
rgt.pop(0)
# Alphanumeric versions are earlier than numeric versions,
# therefore lft < rgt if the right version is numeric.
return isinstance(rgt[0], int)
# t > o
return False
def __eq__(self, other):
"""Implement comparison."""
if not isinstance(other, Version):
raise ValueError('not a Version instance: {0!r}'.format(other))
return self.tuple == other.tuple
def __ne__(self, other):
"""Implement comparison."""
return not self.__eq__(other)
def __gt__(self, other):
"""Implement comparison."""
if not isinstance(other, Version):
raise ValueError('not a Version instance: {0!r}'.format(other))
return other.__lt__(self)
def __le__(self, other):
"""Implement comparison."""
if not isinstance(other, Version):
raise ValueError('not a Version instance: {0!r}'.format(other))
return not other.__lt__(self)
def __ge__(self, other):
"""Implement comparison."""
return not self.__lt__(other)
def __str__(self):
"""Return semantic version string."""
vstr = '{0}.{1}.{2}'.format(self.major, self.minor, self.patch)
if self.suffix:
vstr = '{0}-{1}'.format(vstr, self.suffix)
if self.build:
vstr = '{0}+{1}'.format(vstr, self.build)
return vstr
def __repr__(self):
"""Return 'code' representation of `Version`."""
return "Version('{0}')".format(str(self))
def retrieve_download(dl):
"""Saves a download to a temporary file and returns path.
.. versionadded: 1.37
Args:
url (str): URL to .alfredworkflow file in GitHub repo
Returns:
str: path to downloaded file
"""
if not match_workflow(dl.filename):
raise ValueError('attachment not a workflow: ' + dl.filename)
path = os.path.join(tempfile.gettempdir(), dl.filename)
wf().logger.debug('downloading update from '
'%r to %r ...', dl.url, path)
r = web.get(dl.url)
r.raise_for_status()
r.save_to_path(path)
return path
def build_api_url(repo):
"""Generate releases URL from GitHub repo.
Args:
repo (str): Repo name in form ``username/repo``
Returns:
str: URL to the API endpoint for the repo's releases
"""
if len(repo.split('/')) != 2:
raise ValueError('invalid GitHub repo: {!r}'.format(repo))
return RELEASES_BASE.format(repo)
def get_downloads(repo):
"""Load available ``Download``s for GitHub repo.
.. versionadded:: 1.37
Args:
repo (str): GitHub repo to load releases for.
Returns:
list: Sequence of `Download` contained in GitHub releases.
"""
url = build_api_url(repo)
def _fetch():
wf().logger.info('retrieving releases for %r ...', repo)
r = web.get(url)
r.raise_for_status()
return r.content
key = 'github-releases-' + repo.replace('/', '-')
js = wf().cached_data(key, _fetch, max_age=60)
return Download.from_releases(js)
def latest_download(dls, alfred_version=None, prereleases=False):
"""Return newest `Download`."""
alfred_version = alfred_version or os.getenv('alfred_version')
version = None
if alfred_version:
version = Version(alfred_version)
dls.sort(reverse=True)
for dl in dls:
if dl.prerelease and not prereleases:
wf().logger.debug('ignored prerelease: %s', dl.version)
continue
if version and dl.alfred_version > version:
wf().logger.debug('ignored incompatible (%s > %s): %s',
dl.alfred_version, version, dl.filename)
continue
wf().logger.debug('latest version: %s (%s)', dl.version, dl.filename)
return dl
return None
def check_update(repo, current_version, prereleases=False,
alfred_version=None):
"""Check whether a newer release is available on GitHub.
Args:
repo (str): ``username/repo`` for workflow's GitHub repo
current_version (str): the currently installed version of the
workflow. :ref:`Semantic versioning <semver>` is required.
prereleases (bool): Whether to include pre-releases.
alfred_version (str): version of currently-running Alfred.
if empty, defaults to ``$alfred_version`` environment variable.
Returns:
bool: ``True`` if an update is available, else ``False``
If an update is available, its version number and download URL will
be cached.
"""
key = '__workflow_latest_version'
# data stored when no update is available
no_update = {
'available': False,
'download': None,
'version': None,
}
current = Version(current_version)
dls = get_downloads(repo)
if not len(dls):
wf().logger.warning('no valid downloads for %s', repo)
wf().cache_data(key, no_update)
return False
wf().logger.info('%d download(s) for %s', len(dls), repo)
dl = latest_download(dls, alfred_version, prereleases)
if not dl:
wf().logger.warning('no compatible downloads for %s', repo)
wf().cache_data(key, no_update)
return False
wf().logger.debug('latest=%r, installed=%r', dl.version, current)
if dl.version > current:
wf().cache_data(key, {
'version': str(dl.version),
'download': dl.dict,
'available': True,
})
return True
wf().cache_data(key, no_update)
return False
def install_update():
"""If a newer release is available, download and install it.
:returns: ``True`` if an update is installed, else ``False``
"""
key = '__workflow_latest_version'
# data stored when no update is available
no_update = {
'available': False,
'download': None,
'version': None,
}
status = wf().cached_data(key, max_age=0)
if not status or not status.get('available'):
wf().logger.info('no update available')
return False
dl = status.get('download')
if not dl:
wf().logger.info('no download information')
return False
path = retrieve_download(Download.from_dict(dl))
wf().logger.info('installing updated workflow ...')
subprocess.call(['open', path]) # nosec
wf().cache_data(key, no_update)
return True
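# Typical flow (sketch): `check_update` caches the result and
# `install_update` acts on it. Repo and version strings are illustrative.
#
# if check_update('deanishe/alfred-workflow', '1.39.0'):
#     install_update()  # fetches the cached download and opens it in Alfred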
if __name__ == '__main__': # pragma: nocover
import sys
prereleases = False
def show_help(status=0):
"""Print help message."""
print('usage: update.py (check|install) '
'[--prereleases] <repo> <version>')
sys.exit(status)
argv = sys.argv[:]
if '-h' in argv or '--help' in argv:
show_help()
if '--prereleases' in argv:
argv.remove('--prereleases')
prereleases = True
if len(argv) != 4:
show_help(1)
action = argv[1]
repo = argv[2]
version = argv[3]
try:
if action == 'check':
check_update(repo, version, prereleases)
elif action == 'install':
install_update()
else:
show_help(1)
except Exception as err: # ensure traceback is in log file
wf().logger.exception(err)
raise err
|
Alfred-PyWorkflow
|
/Alfred-PyWorkflow-2.0.0b0.tar.gz/Alfred-PyWorkflow-2.0.0b0/workflow/update.py
|
update.py
|
<div align="center">
<img src="./icon.png" alt="Alfred-Workflow logo" height="200">
</div>
Alfred-Workflow
===============
A helper library in Python for authors of workflows for [Alfred 3 and 4][alfred].
<!-- [![Build Status][shield-travis]][travis] -->
[![Build Status][shield-github]][action-github]
[![Coverage Status][shield-coveralls]][coveralls]
[![Development Status][shield-status]][pypi]
[![Latest Version][shield-version]][pypi]
[![Supported Python Versions][shield-pyversions]][pypi]
<!-- [![Downloads][shield-download]][pypi] -->
Supports Alfred 3 and Alfred 4 on macOS 10.7+ (Python 2.7).
Alfred-Workflow takes the grunt work out of writing a workflow by giving you the tools to create a fast and featureful Alfred workflow from an API, application or library in minutes.
Always supports all current Alfred features.
Features
--------
- Auto-saved settings API for your workflow
- Super-simple data caching with expiry
- Fuzzy filtering (with smart diacritic folding)
- Keychain support for secure storage of passwords, API keys etc.
- Lightweight web API with [Requests][requests]-like interface
- Background tasks to keep your workflow responsive
- Simple generation of Alfred JSON feedback
- Full support of Alfred's AppleScript/JXA API
- Catches and logs workflow errors for easier development and support
- "Magic" arguments to help development/debugging
- Unicode support
- Pre-configured logging
- Automatically check for workflow updates via GitHub releases
- Post notifications via Notification Center
### Alfred 4+ features ###
- Advanced modifiers
- Alfred 4-only updates (won't break older Alfred installs)
Contents
--------
<!-- MarkdownTOC autolink="true" bracket="round" depth="3" autoanchor="true" -->
- [Installation](#installation)
- [With pip](#with-pip)
- [From source](#from-source)
- [Usage](#usage)
- [Workflow script skeleton](#workflow-script-skeleton)
- [Examples](#examples)
- [Web](#web)
- [Keychain access](#keychain-access)
- [Documentation](#documentation)
- [Dash docset](#dash-docset)
- [Licensing, thanks](#licensing-thanks)
- [Contributing](#contributing)
- [Adding a workflow to the list](#adding-a-workflow-to-the-list)
- [Bug reports, pull requests](#bug-reports-pull-requests)
- [Contributors](#contributors)
- [Workflows using Alfred-Workflow](#workflows-using-alfred-workflow)
<!-- /MarkdownTOC -->
<a name="installation"></a>
Installation
------------
**Note**: If you're new to Alfred workflows, check out
[the tutorial][docs-tutorial] in the docs.
<a name="with-pip"></a>
### With pip ###
You can install Alfred-Workflow directly into your workflow with:
```bash
# from your workflow directory
pip install --target=. Alfred-Workflow
```
You can install any other library available on the [Cheese Shop][cheeseshop] the same way. See the [pip documentation][pip-docs] for more information.
It is highly advisable to bundle all your workflow's dependencies with your workflow in this way. That way, it will "just work".
<a name="from-source"></a>
### From source ###
1. Download the `alfred-workflow-X.X.X.zip` from the [GitHub releases page][releases].
2. Extract the ZIP archive and place the `workflow` directory in the root folder of your workflow (where `info.plist` is).
Your workflow should look something like this:
Your Workflow/
info.plist
icon.png
workflow/
__init__.py
background.py
notify.py
Notify.tgz
update.py
version
web.py
workflow.py
yourscript.py
etc.
Alternatively, you can clone/download the Alfred-Workflow [repository][repo] and copy the `workflow` subdirectory to your workflow's root directory.
<a name="usage"></a>
Usage
-----
A few examples of how to use Alfred-Workflow.
<a name="workflow-script-skeleton"></a>
### Workflow script skeleton ###
Set up your workflow scripts as follows (if you wish to use the built-in error handling or `sys.path` modification):
```python
#!/usr/bin/python
# encoding: utf-8
import sys
# Workflow3 supports Alfred 3's new features. The `Workflow` class
# is also compatible with Alfred 2.
from workflow import Workflow3
def main(wf):
# The Workflow3 instance will be passed to the function
# you call from `Workflow3.run`.
# Not super useful, as the `wf` object created in
# the `if __name__ ...` clause below is global...
#
# Your imports go here if you want to catch import errors, which
# is not a bad idea, or if the modules/packages are in a directory
# added via `Workflow3(libraries=...)`
import somemodule
import anothermodule
# Get args from Workflow3, already in normalized Unicode.
# This is also necessary for "magic" arguments to work.
args = wf.args
# Do stuff here ...
# Add an item to Alfred feedback
wf.add_item(u'Item title', u'Item subtitle')
# Send output to Alfred. You can only call this once.
# Well, you *can* call it multiple times, but subsequent calls
# are ignored (otherwise the JSON sent to Alfred would be invalid).
wf.send_feedback()
if __name__ == '__main__':
# Create a global `Workflow3` object
wf = Workflow3()
# Call your entry function via `Workflow3.run()` to enable its
# helper functions, like exception catching, ARGV normalization,
# magic arguments etc.
sys.exit(wf.run(main))
```
<a name="examples"></a>
### Examples ###
Cache data for 30 seconds:
```python
def get_web_data():
return web.get('http://www.example.com').json()
def main(wf):
# Save data from `get_web_data` for 30 seconds under
# the key ``example``
data = wf.cached_data('example', get_web_data, max_age=30)
for datum in data:
wf.add_item(datum['title'], datum['author'])
wf.send_feedback()
```
<a name="web"></a>
#### Web ####
Grab data from a JSON web API:
```python
data = web.get('http://www.example.com/api/1/stuff').json()
```
Post a form:
```python
r = web.post('http://www.example.com/',
data={'artist': 'Tom Jones', 'song': "It's not unusual"})
```
Upload a file:
```python
files = {'fieldname' : {'filename': "It's not unusual.mp3",
'content': open("It's not unusual.mp3", 'rb').read()}
}
r = web.post('http://www.example.com/upload/', files=files)
```
**WARNING**: As this module is based on Python 2's standard HTTP libraries, *on old versions of OS X/Python, it does not validate SSL certificates when making HTTPS connections*. If your workflow uses sensitive passwords/API keys, you should *strongly consider* using the [requests][requests] library upon which the `web.py` API is based.
<a name="keychain-access"></a>
#### Keychain access ####
Save password:
```python
wf = Workflow()
wf.save_password('name of account', 'password1lolz')
```
Retrieve password:
```python
wf = Workflow()
wf.get_password('name of account')
```
<a name="documentation"></a>
Documentation
-------------
The full documentation, including API docs and a tutorial, can be found at [deanishe.net][docs].
<a name="dash-docset"></a>
### Dash docset ###
The documentation is also available as a [Dash docset][dash].
<a name="licensing-thanks"></a>
Licensing, thanks
-----------------
The code and the documentation are released under the MIT and [Creative Commons Attribution-NonCommercial][cc] licences respectively. See [LICENCE.txt](LICENCE.txt) for details.
The documentation was generated using [Sphinx][sphinx] and a modified version of the [Alabaster][alabaster] theme by [bitprophet][bitprophet].
Many of the cooler ideas in Alfred-Workflow were inspired by [Alfred2-Ruby-Template][ruby-template] by Zhaocai.
The Keychain parser was based on [Python-Keyring][python-keyring] by Jason R. Coombs.
<a name="contributing"></a>
Contributing
------------
<a name="adding-a-workflow-to-the-list"></a>
### Adding a workflow to the list ###
If you want to add a workflow to the [list of workflows using Alfred-Workflow][docs-workflows], **don't add it to the docs!** The list is machine-generated from [Packal.org][packal] and the [`library_workflows.tsv`](extras/library_workflows.tsv) file. If your workflow is available on [Packal][packal], it will be added on the next update. If not, please add it to [`library_workflows.tsv`](extras/library_workflows.tsv), and submit a corresponding pull request.
The list is not auto-updated, so if you've released a workflow and are keen to see it in this list, please [open an issue][issues] asking me to update the list.
<a name="bug-reports-pull-requests"></a>
### Bug reports, pull requests ###
Please see [the documentation][docs-contributing].
<a name="contributors"></a>
### Contributors ###
- [Dean Jackson][deanishe]
- [Stephen Margheim][smargh]
- [Fabio Niephaus][fniephaus]
- [Owen Min][owenwater]
<a name="workflows-using-alfred-workflow"></a>
Workflows using Alfred-Workflow
-------------------------------
[Here is a list][docs-workflows] of some of the many workflows based on Alfred-Workflow.
[alfred]: http://www.alfredapp.com/
[awv2]: https://github.com/deanishe/alfred-workflow/tree/v2
[alabaster]: https://github.com/bitprophet/alabaster
[bitprophet]: https://github.com/bitprophet
[cc]: https://creativecommons.org/licenses/by-nc/4.0/legalcode
[coveralls]: https://coveralls.io/r/deanishe/alfred-workflow?branch=master
[deanishe]: https://github.com/deanishe
[docs-contributing]: http://www.deanishe.net/alfred-workflow/contributing.html
[docs-tutorial]: http://www.deanishe.net/alfred-workflow/tutorial.html
[docs-api]: http://www.deanishe.net/alfred-workflow/api/
[docs]: http://www.deanishe.net/alfred-workflow/
[docs-workflows]: http://www.deanishe.net/alfred-workflow/aw-workflows.html
[dash]: https://github.com/deanishe/alfred-workflow/raw/master/docs/Alfred-Workflow.docset.zip
[fniephaus]: https://github.com/fniephaus
[owenwater]: https://github.com/owenwater
[issues]: https://github.com/deanishe/alfred-workflow/issues
[landscape]: https://landscape.io/github/deanishe/alfred-workflow/master
[packal]: http://www.packal.org/
[pep8]: http://legacy.python.org/dev/peps/pep-0008/
[pulls]: https://github.com/deanishe/alfred-workflow/pulls
[pypi]: https://pypi.python.org/pypi/Alfred-Workflow/
[releases]: https://github.com/deanishe/alfred-workflow/releases
[repo]: https://github.com/deanishe/alfred-workflow
[requests]: http://docs.python-requests.org/en/latest/
[rtd]: https://readthedocs.org/
[shield-coveralls]: https://coveralls.io/repos/github/deanishe/alfred-workflow/badge.svg?branch=master
[shield-docs]: https://readthedocs.org/projects/alfredworkflow/badge/?version=latest&style=flat
[shield-download]: https://img.shields.io/pypi/dm/Alfred-Workflow.svg?style=flat
[shield-github]: https://github.com/deanishe/alfred-workflow/workflows/CI/badge.svg
[action-github]: https://github.com/deanishe/alfred-workflow/actions?query=workflow%3ACI
[shield-health]: https://landscape.io/github/deanishe/alfred-workflow/master/landscape.png?style=flat
[shield-licence]: https://pypip.in/license/Alfred-Workflow/badge.svg?style=flat
[shield-status]: https://img.shields.io/pypi/status/Alfred-Workflow.svg?style=flat
[shield-travis]: https://travis-ci.org/deanishe/alfred-workflow.svg?branch=master&style=flat
[shield-version]: https://img.shields.io/pypi/v/Alfred-Workflow.svg?style=flat
[shield-pyversions]: https://img.shields.io/pypi/pyversions/Alfred-Workflow.svg?style=flat
[smargh]: https://github.com/smargh
[sphinx]: http://sphinx-doc.org/
[travis]: https://travis-ci.org/deanishe/alfred-workflow
[cheeseshop]: https://pypi.python.org/pypi
[pip-docs]: https://pip.pypa.io/en/latest/
[ruby-template]: http://zhaocai.github.io/alfred2-ruby-template/
[python-keyring]: https://pypi.python.org/pypi/keyring
|
Alfred-Workflow
|
/Alfred-Workflow-1.40.0.tar.gz/Alfred-Workflow-1.40.0/README.md
|
README.md
|
A helper library for writing `Alfred 2, 3 and 4`_ workflows.
Supports macOS 10.7+ and Python 2.7 (Alfred 3 is 10.9+/2.7 only).
Alfred-Workflow is designed to take the grunt work out of writing a workflow.
It gives you the tools to create a fast and featureful Alfred workflow from an
API, application or library in minutes.
http://www.deanishe.net/alfred-workflow/
Features
========
* Catches and logs workflow errors for easier development and support
* "Magic" arguments to help development/debugging
* Auto-saves settings
* Super-simple data caching
* Fuzzy, Alfred-like search/filtering with diacritic folding
* Keychain support for secure storage (and syncing) of passwords, API keys etc.
* Simple generation of Alfred feedback (XML output)
* Input/output decoding for handling non-ASCII text
* Lightweight web API modelled on `requests`_
* Pre-configured logging
* Painlessly add directories to ``sys.path``
* Easily launch background tasks (daemons) to keep your workflow responsive
* Check for new versions and update workflows hosted on GitHub.
* Post notifications via Notification Center.
Alfred 3-only features
----------------------
* Set `workflow variables`_ from code
* Advanced modifiers
* Alfred 3-only updates (won't break Alfred 2 installs)
* Re-running Script Filters
Quick Example
=============
Here's how to show recent `Pinboard.in <https://pinboard.in/>`_ posts
in Alfred.
Create a new workflow in Alfred's preferences. Add a **Script Filter** with
Language ``/usr/bin/python`` and paste the following into the **Script**
field (changing ``API_KEY``):
.. code-block:: python
import sys
from workflow import Workflow, ICON_WEB, web
API_KEY = 'your-pinboard-api-key'
def main(wf):
url = 'https://api.pinboard.in/v1/posts/recent'
params = dict(auth_token=API_KEY, count=20, format='json')
r = web.get(url, params)
r.raise_for_status()
for post in r.json()['posts']:
wf.add_item(post['description'], post['href'], arg=post['href'],
uid=post['hash'], valid=True, icon=ICON_WEB)
wf.send_feedback()
if __name__ == u"__main__":
wf = Workflow()
sys.exit(wf.run(main))
Add an **Open URL** action to your workflow with ``{query}`` as the **URL**,
connect your **Script Filter** to it, and you can now hit **ENTER** on a
Pinboard item in Alfred to open it in your browser.
Installation
============
**Note**: If you intend to distribute your workflow to other users, you
should include Alfred-Workflow (and other Python libraries your workflow
requires) within your workflow's directory as described below. **Do not**
ask users to install anything into their system Python. Python installations
cannot support multiple versions of the same library, so if you rely on
globally-installed libraries, the chances are very good that your workflow
will sooner or later break—or be broken by—some other software doing the
same naughty thing.
With pip
--------
You can install Alfred-Workflow directly into your workflow with::
# from within your workflow directory
pip install --target=. Alfred-Workflow
You can install any other library available on the `Cheese Shop`_ the
same way. See the `pip documentation`_ for more information.
From source
-----------
Download the ``alfred-workflow-X.X.X.zip`` file from the `GitHub releases`_
page and extract the ZIP to the root directory of your workflow (where
``info.plist`` is).
Alternatively, you can download `the source code`_ from the
`GitHub repository`_ and copy the ``workflow`` subfolder to the root
directory of your workflow.
Your workflow directory should look something like this (where
``yourscript.py`` contains your workflow code and ``info.plist`` is
the workflow information file generated by Alfred)::
Your Workflow/
info.plist
icon.png
workflow/
__init__.py
background.py
notify.py
Notify.tgz
update.py
version
web.py
workflow.py
yourscript.py
etc.
Documentation
=============
Detailed documentation, including a tutorial, is available at
http://www.deanishe.net/alfred-workflow/.
.. _v2 branch: https://github.com/deanishe/alfred-workflow/tree/v2
.. _requests: http://docs.python-requests.org/en/latest/
.. _Alfred 2, 3 and 4: http://www.alfredapp.com/
.. _GitHub releases: https://github.com/deanishe/alfred-workflow/releases
.. _the source code: https://github.com/deanishe/alfred-workflow/archive/master.zip
.. _GitHub repository: https://github.com/deanishe/alfred-workflow
.. _Cheese Shop: https://pypi.python.org/pypi
.. _pip documentation: https://pip.pypa.io/en/latest/
.. _workflow variables: http://www.deanishe.net/alfred-workflow/user-manual/workflow-variables.html
|
Alfred-Workflow
|
/Alfred-Workflow-1.40.0.tar.gz/Alfred-Workflow-1.40.0/README_PYPI.rst
|
README_PYPI.rst
|
from __future__ import print_function, absolute_import
import atexit
from collections import namedtuple
from contextlib import contextmanager
import errno
import fcntl
import functools
import json
import os
import signal
import subprocess
import sys
from threading import Event
import time
# JXA scripts to call Alfred's API via the Scripting Bridge
# {app} is automatically replaced with "Alfred 3" or
# "com.runningwithcrayons.Alfred" depending on version.
#
# Open Alfred in search (regular) mode
JXA_SEARCH = 'Application({app}).search({arg});'
# Open Alfred's File Actions on an argument
JXA_ACTION = 'Application({app}).action({arg});'
# Open Alfred's navigation mode at path
JXA_BROWSE = 'Application({app}).browse({arg});'
# Set the specified theme
JXA_SET_THEME = 'Application({app}).setTheme({arg});'
# Call an External Trigger
JXA_TRIGGER = 'Application({app}).runTrigger({arg}, {opts});'
# Save a variable to the workflow configuration sheet/info.plist
JXA_SET_CONFIG = 'Application({app}).setConfiguration({arg}, {opts});'
# Delete a variable from the workflow configuration sheet/info.plist
JXA_UNSET_CONFIG = 'Application({app}).removeConfiguration({arg}, {opts});'
# Tell Alfred to reload a workflow from disk
JXA_RELOAD_WORKFLOW = 'Application({app}).reloadWorkflow({arg});'
class AcquisitionError(Exception):
"""Raised if a lock cannot be acquired."""
AppInfo = namedtuple('AppInfo', ['name', 'path', 'bundleid'])
"""Information about an installed application.
Returned by :func:`appinfo`. All attributes are Unicode.
.. py:attribute:: name
Name of the application, e.g. ``u'Safari'``.
.. py:attribute:: path
Path to the application bundle, e.g. ``u'/Applications/Safari.app'``.
.. py:attribute:: bundleid
Application's bundle ID, e.g. ``u'com.apple.Safari'``.
"""
def jxa_app_name():
"""Return name of application to call currently running Alfred.
.. versionadded:: 1.37
Returns 'Alfred 3' or 'com.runningwithcrayons.Alfred' depending
on which version of Alfred is running.
This name is suitable for use with ``Application(name)`` in JXA.
Returns:
unicode: Application name or ID.
"""
if os.getenv('alfred_version', '').startswith('3'):
# Alfred 3
return u'Alfred 3'
# Alfred 4+
return u'com.runningwithcrayons.Alfred'
def unicodify(s, encoding='utf-8', norm=None):
"""Ensure string is Unicode.
.. versionadded:: 1.31
Decode encoded strings using ``encoding`` and normalise Unicode
to form ``norm`` if specified.
Args:
s (str): String to decode. May also be Unicode.
encoding (str, optional): Encoding to use on bytestrings.
norm (None, optional): Normalisation form to apply to Unicode string.
Returns:
unicode: Decoded, optionally normalised, Unicode string.
"""
if not isinstance(s, unicode):
s = unicode(s, encoding)
if norm:
from unicodedata import normalize
s = normalize(norm, s)
return s
def utf8ify(s):
"""Ensure string is a bytestring.
.. versionadded:: 1.31
Returns `str` objects unchanged, encodes `unicode` objects to
UTF-8, and calls :func:`str` on anything else.
Args:
s (object): A Python object
Returns:
str: UTF-8 string or string representation of s.
"""
if isinstance(s, str):
return s
if isinstance(s, unicode):
return s.encode('utf-8')
return str(s)
def applescriptify(s):
"""Escape string for insertion into an AppleScript string.
.. versionadded:: 1.31
Replaces ``"`` with ``" & quote & "``. Use this function if you want
to insert a string into an AppleScript script:
>>> applescriptify('g "python" test')
'g " & quote & "python" & quote & "test'
Args:
s (unicode): Unicode string to escape.
Returns:
unicode: Escaped string.
"""
return s.replace(u'"', u'" & quote & "')
def run_command(cmd, **kwargs):
"""Run a command and return the output.
.. versionadded:: 1.31
A thin wrapper around :func:`subprocess.check_output` that ensures
all arguments are encoded to UTF-8 first.
Args:
cmd (list): Command arguments to pass to :func:`~subprocess.check_output`.
**kwargs: Keyword arguments to pass to :func:`~subprocess.check_output`.
Returns:
str: Output returned by :func:`~subprocess.check_output`.
"""
cmd = [utf8ify(s) for s in cmd]
return subprocess.check_output(cmd, **kwargs)
def run_applescript(script, *args, **kwargs):
"""Execute an AppleScript script and return its output.
.. versionadded:: 1.31
Run AppleScript either by filepath or code. If ``script`` is a valid
filepath, that script will be run, otherwise ``script`` is treated
as code.
Args:
script (str, optional): Filepath of script or code to run.
*args: Optional command-line arguments to pass to the script.
**kwargs: Pass ``lang`` to run a language other than AppleScript.
Any other keyword arguments are passed to :func:`run_command`.
Returns:
str: Output of run command.
"""
lang = 'AppleScript'
if 'lang' in kwargs:
lang = kwargs['lang']
del kwargs['lang']
cmd = ['/usr/bin/osascript', '-l', lang]
if os.path.exists(script):
cmd += [script]
else:
cmd += ['-e', script]
cmd.extend(args)
return run_command(cmd, **kwargs)
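# Illustrative sketch: `script` may be inline code or a filepath
# (the path below is made up).
# run_applescript('return "hello"')         # runs the code string via -e
# run_applescript('/path/to/script.scpt')   # existing path -> run as a file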
def run_jxa(script, *args):
"""Execute a JXA script and return its output.
.. versionadded:: 1.31
Wrapper around :func:`run_applescript` that passes ``lang=JavaScript``.
Args:
script (str): Filepath of script or code to run.
*args: Optional command-line arguments to pass to script.
Returns:
str: Output of script.
"""
return run_applescript(script, *args, lang='JavaScript')
def run_trigger(name, bundleid=None, arg=None):
"""Call an Alfred External Trigger.
.. versionadded:: 1.31
If ``bundleid`` is not specified, the bundle ID of the calling
workflow is used.
Args:
name (str): Name of External Trigger to call.
bundleid (str, optional): Bundle ID of workflow trigger belongs to.
arg (str, optional): Argument to pass to trigger.
"""
bundleid = bundleid or os.getenv('alfred_workflow_bundleid')
appname = jxa_app_name()
opts = {'inWorkflow': bundleid}
if arg:
opts['withArgument'] = arg
script = JXA_TRIGGER.format(app=json.dumps(appname),
arg=json.dumps(name),
opts=json.dumps(opts, sort_keys=True))
run_applescript(script, lang='JavaScript')
def set_theme(theme_name):
"""Change Alfred's theme.
.. versionadded:: 1.39.0
Args:
theme_name (unicode): Name of theme Alfred should use.
"""
appname = jxa_app_name()
script = JXA_SET_THEME.format(app=json.dumps(appname),
arg=json.dumps(theme_name))
run_applescript(script, lang='JavaScript')
def set_config(name, value, bundleid=None, exportable=False):
"""Set a workflow variable in ``info.plist``.
.. versionadded:: 1.33
If ``bundleid`` is not specified, the bundle ID of the calling
workflow is used.
Args:
name (str): Name of variable to set.
value (str): Value to set variable to.
bundleid (str, optional): Bundle ID of workflow variable belongs to.
exportable (bool, optional): Whether variable should be marked
as exportable (Don't Export checkbox).
"""
bundleid = bundleid or os.getenv('alfred_workflow_bundleid')
appname = jxa_app_name()
opts = {
'toValue': value,
'inWorkflow': bundleid,
'exportable': exportable,
}
script = JXA_SET_CONFIG.format(app=json.dumps(appname),
arg=json.dumps(name),
opts=json.dumps(opts, sort_keys=True))
run_applescript(script, lang='JavaScript')
def unset_config(name, bundleid=None):
"""Delete a workflow variable from ``info.plist``.
.. versionadded:: 1.33
If ``bundleid`` is not specified, the bundle ID of the calling
workflow is used.
Args:
name (str): Name of variable to delete.
bundleid (str, optional): Bundle ID of workflow variable belongs to.
"""
bundleid = bundleid or os.getenv('alfred_workflow_bundleid')
appname = jxa_app_name()
opts = {'inWorkflow': bundleid}
script = JXA_UNSET_CONFIG.format(app=json.dumps(appname),
arg=json.dumps(name),
opts=json.dumps(opts, sort_keys=True))
run_applescript(script, lang='JavaScript')
def search_in_alfred(query=None):
"""Open Alfred with given search query.
.. versionadded:: 1.39.0
Omit ``query`` to simply open Alfred's main window.
Args:
query (unicode, optional): Search query.
"""
query = query or u''
appname = jxa_app_name()
script = JXA_SEARCH.format(app=json.dumps(appname), arg=json.dumps(query))
run_applescript(script, lang='JavaScript')
def browse_in_alfred(path):
"""Open Alfred's filesystem navigation mode at ``path``.
.. versionadded:: 1.39.0
Args:
path (unicode): File or directory path.
"""
appname = jxa_app_name()
script = JXA_BROWSE.format(app=json.dumps(appname), arg=json.dumps(path))
run_applescript(script, lang='JavaScript')
def action_in_alfred(paths):
"""Action the give filepaths in Alfred.
.. versionadded:: 1.39.0
Args:
paths (list): Unicode paths to files/directories to action.
"""
appname = jxa_app_name()
script = JXA_ACTION.format(app=json.dumps(appname), arg=json.dumps(paths))
run_applescript(script, lang='JavaScript')
def reload_workflow(bundleid=None):
"""Tell Alfred to reload a workflow from disk.
.. versionadded:: 1.39.0
If ``bundleid`` is not specified, the bundle ID of the calling
workflow is used.
Args:
bundleid (unicode, optional): Bundle ID of workflow to reload.
"""
bundleid = bundleid or os.getenv('alfred_workflow_bundleid')
appname = jxa_app_name()
script = JXA_RELOAD_WORKFLOW.format(app=json.dumps(appname),
arg=json.dumps(bundleid))
run_applescript(script, lang='JavaScript')
def appinfo(name):
"""Get information about an installed application.
.. versionadded:: 1.31
Args:
name (str): Name of application to look up.
Returns:
AppInfo: :class:`AppInfo` tuple or ``None`` if app isn't found.
"""
cmd = [
'mdfind',
'-onlyin', '/Applications',
'-onlyin', '/System/Applications',
'-onlyin', os.path.expanduser('~/Applications'),
'(kMDItemContentTypeTree == com.apple.application &&'
'(kMDItemDisplayName == "{0}" || kMDItemFSName == "{0}.app"))'
.format(name)
]
output = run_command(cmd).strip()
if not output:
return None
path = output.split('\n')[0]
cmd = ['mdls', '-raw', '-name', 'kMDItemCFBundleIdentifier', path]
bid = run_command(cmd).strip()
if not bid: # pragma: no cover
return None
return AppInfo(unicodify(name), unicodify(path), unicodify(bid))
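# Illustrative sketch: look up an installed application by name.
# info = appinfo('Safari')
# if info:  # None if the app isn't found
#     print(info.name, info.path, info.bundleid)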
@contextmanager
def atomic_writer(fpath, mode):
"""Atomic file writer.
.. versionadded:: 1.12
Context manager that ensures the file is only written if the write
succeeds. The data is first written to a temporary file.
:param fpath: path of file to write to.
:type fpath: ``unicode``
:param mode: same as for :func:`open`
:type mode: string
"""
suffix = '.{}.tmp'.format(os.getpid())
temppath = fpath + suffix
with open(temppath, mode) as fp:
try:
yield fp
os.rename(temppath, fpath)
finally:
try:
os.remove(temppath)
except (OSError, IOError):
pass
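# Usage sketch (path and payload are made up): the target file is only
# replaced if the whole write succeeds; a failure leaves it untouched.
#
# with atomic_writer('/path/to/settings.json', 'wb') as fp:
#     fp.write(json.dumps({'key': 'value'}).encode('utf-8'))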
class LockFile(object):
"""Context manager to protect filepaths with lockfiles.
.. versionadded:: 1.13
Creates a lockfile alongside ``protected_path``. Other ``LockFile``
instances will refuse to lock the same path.
>>> path = '/path/to/file'
>>> with LockFile(path):
>>> with open(path, 'wb') as fp:
>>> fp.write(data)
Args:
protected_path (unicode): File to protect with a lockfile
timeout (float, optional): Raises an :class:`AcquisitionError`
if lock cannot be acquired within this number of seconds.
If ``timeout`` is 0 (the default), wait forever.
delay (float, optional): How often to check (in seconds) if
lock has been released.
Attributes:
delay (float): How often to check (in seconds) whether the lock
can be acquired.
lockfile (unicode): Path of the lockfile.
timeout (float): How long to wait to acquire the lock.
"""
def __init__(self, protected_path, timeout=0.0, delay=0.05):
"""Create new :class:`LockFile` object."""
self.lockfile = protected_path + '.lock'
self._lockfile = None
self.timeout = timeout
self.delay = delay
self._lock = Event()
atexit.register(self.release)
@property
def locked(self):
"""``True`` if file is locked by this instance."""
return self._lock.is_set()
def acquire(self, blocking=True):
"""Acquire the lock if possible.
If the lock is in use and ``blocking`` is ``False``, return
``False``.
Otherwise, check every :attr:`delay` seconds until the lock is
acquired or :attr:`timeout` is exceeded, at which point an
:class:`AcquisitionError` is raised.
"""
if self.locked and not blocking:
return False
start = time.time()
while True:
# Raise error if we've been waiting too long to acquire the lock
if self.timeout and (time.time() - start) >= self.timeout:
raise AcquisitionError('lock acquisition timed out')
# If already locked, wait then try again
if self.locked:
time.sleep(self.delay)
continue
# Create in append mode so we don't lose any contents
if self._lockfile is None:
self._lockfile = open(self.lockfile, 'a')
# Try to acquire the lock
try:
fcntl.lockf(self._lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
self._lock.set()
break
except IOError as err: # pragma: no cover
if err.errno not in (errno.EACCES, errno.EAGAIN):
raise
# Don't try again
if not blocking: # pragma: no cover
return False
# Wait, then try again
time.sleep(self.delay)
return True
def release(self):
"""Release the lock by deleting `self.lockfile`."""
if not self._lock.is_set():
return False
try:
fcntl.lockf(self._lockfile, fcntl.LOCK_UN)
except IOError: # pragma: no cover
pass
finally:
self._lock.clear()
self._lockfile = None
try:
os.unlink(self.lockfile)
except (IOError, OSError): # pragma: no cover
pass
return True
def __enter__(self):
"""Acquire lock."""
self.acquire()
return self
def __exit__(self, typ, value, traceback):
"""Release lock."""
self.release()
def __del__(self):
"""Clear up `self.lockfile`."""
self.release() # pragma: no cover
class uninterruptible(object):
"""Decorator that postpones SIGTERM until wrapped function returns.
.. versionadded:: 1.12
.. important:: This decorator is NOT thread-safe.
As of version 2.7, Alfred allows Script Filters to be killed. If
your workflow is killed in the middle of critical code (e.g.
writing data to disk), this may corrupt your workflow's data.
Use this decorator to wrap critical functions that *must* complete.
If the script is killed while a wrapped function is executing,
the SIGTERM will be caught and handled after your function has
finished executing.
Alfred-Workflow uses this internally to ensure its settings, data
and cache writes complete.
"""
def __init__(self, func, class_name=''):
"""Decorate `func`."""
self.func = func
functools.update_wrapper(self, func)
self._caught_signal = None
def signal_handler(self, signum, frame):
"""Called when process receives SIGTERM."""
self._caught_signal = (signum, frame)
def __call__(self, *args, **kwargs):
"""Trap ``SIGTERM`` and call wrapped function."""
self._caught_signal = None
# Register handler for SIGTERM, then call `self.func`
self.old_signal_handler = signal.getsignal(signal.SIGTERM)
signal.signal(signal.SIGTERM, self.signal_handler)
self.func(*args, **kwargs)
# Restore old signal handler
signal.signal(signal.SIGTERM, self.old_signal_handler)
# Handle any signal caught during execution
if self._caught_signal is not None:
signum, frame = self._caught_signal
if callable(self.old_signal_handler):
self.old_signal_handler(signum, frame)
elif self.old_signal_handler == signal.SIG_DFL:
sys.exit(0)
def __get__(self, obj=None, klass=None):
"""Decorator API."""
return self.__class__(self.func.__get__(obj, klass),
klass.__name__)
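# Usage sketch: wrap a critical write so a SIGTERM from Alfred is deferred
# until the function returns (the function body is illustrative).
#
# @uninterruptible
# def save_settings(data):
#     with atomic_writer('/path/to/settings.json', 'w') as fp:
#         json.dump(data, fp)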
|
Alfred-Workflow
|
/Alfred-Workflow-1.40.0.tar.gz/Alfred-Workflow-1.40.0/workflow/util.py
|
util.py
|
from __future__ import print_function, unicode_literals, absolute_import
import json
import os
import sys
from .workflow import ICON_WARNING, Workflow
class Variables(dict):
"""Workflow variables for Run Script actions.
.. versionadded:: 1.26
This class allows you to set workflow variables from
Run Script actions.
It is a subclass of :class:`dict`.
>>> v = Variables(username='deanishe', password='hunter2')
>>> v.arg = u'output value'
>>> print(v)
See :ref:`variables-run-script` in the User Guide for more
information.
Args:
arg (unicode or list, optional): Main output/``{query}``.
**variables: Workflow variables to set.
In Alfred 4.1+ and Alfred-Workflow 1.40+, ``arg`` may also be a
:class:`list` or :class:`tuple`.
Attributes:
arg (unicode or list): Output value (``{query}``).
In Alfred 4.1+ and Alfred-Workflow 1.40+, ``arg`` may also be a
:class:`list` or :class:`tuple`.
config (dict): Configuration for downstream workflow element.
"""
def __init__(self, arg=None, **variables):
"""Create a new `Variables` object."""
self.arg = arg
self.config = {}
super(Variables, self).__init__(**variables)
@property
def obj(self):
"""``alfredworkflow`` :class:`dict`."""
o = {}
if self:
d2 = {}
for k, v in self.items():
d2[k] = v
o['variables'] = d2
if self.config:
o['config'] = self.config
if self.arg is not None:
o['arg'] = self.arg
return {'alfredworkflow': o}
def __unicode__(self):
"""Convert to ``alfredworkflow`` JSON object.
Returns:
unicode: ``alfredworkflow`` JSON object
"""
if not self and not self.config:
if not self.arg:
return u''
if isinstance(self.arg, unicode):
return self.arg
return json.dumps(self.obj)
def __str__(self):
"""Convert to ``alfredworkflow`` JSON object.
Returns:
str: UTF-8 encoded ``alfredworkflow`` JSON object
"""
return unicode(self).encode('utf-8')
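# Usage sketch for a Run Script action (values are made up):
# v = Variables(username='deanishe')  # workflow variable 'username'
# v.arg = u'output value'             # main output / {query}
# print(v)                            # emits the `alfredworkflow` JSON object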
class Modifier(object):
"""Modify :class:`Item3` arg/icon/variables when modifier key is pressed.
Don't use this class directly (as it won't be associated with any
:class:`Item3`), but rather use :meth:`Item3.add_modifier()`
to add modifiers to results.
>>> it = wf.add_item('Title', 'Subtitle', valid=True)
>>> it.setvar('name', 'default')
>>> m = it.add_modifier('cmd')
>>> m.setvar('name', 'alternate')
See :ref:`workflow-variables` in the User Guide for more information
and :ref:`example usage <example-variables>`.
Args:
key (unicode): Modifier key, e.g. ``"cmd"``, ``"alt"`` etc.
subtitle (unicode, optional): Override default subtitle.
arg (unicode, optional): Argument to pass for this modifier.
valid (bool, optional): Override item's validity.
icon (unicode, optional): Filepath/UTI of icon to use
icontype (unicode, optional): Type of icon. See
:meth:`Workflow.add_item() <workflow.Workflow.add_item>`
for valid values.
Attributes:
arg (unicode): Arg to pass to following action.
config (dict): Configuration for a downstream element, such as
a File Filter.
icon (unicode): Filepath/UTI of icon.
icontype (unicode): Type of icon. See
:meth:`Workflow.add_item() <workflow.Workflow.add_item>`
for valid values.
key (unicode): Modifier key (see above).
subtitle (unicode): Override item subtitle.
valid (bool): Override item validity.
variables (dict): Workflow variables set by this modifier.
"""
def __init__(self, key, subtitle=None, arg=None, valid=None, icon=None,
icontype=None):
"""Create a new :class:`Modifier`.
Don't use this class directly (as it won't be associated with any
:class:`Item3`), but rather use :meth:`Item3.add_modifier()`
to add modifiers to results.
Args:
key (unicode): Modifier key, e.g. ``"cmd"``, ``"alt"`` etc.
subtitle (unicode, optional): Override default subtitle.
arg (unicode, optional): Argument to pass for this modifier.
valid (bool, optional): Override item's validity.
icon (unicode, optional): Filepath/UTI of icon to use
icontype (unicode, optional): Type of icon. See
:meth:`Workflow.add_item() <workflow.Workflow.add_item>`
for valid values.
"""
self.key = key
self.subtitle = subtitle
self.arg = arg
self.valid = valid
self.icon = icon
self.icontype = icontype
self.config = {}
self.variables = {}
def setvar(self, name, value):
"""Set a workflow variable for this Item.
Args:
name (unicode): Name of variable.
value (unicode): Value of variable.
"""
self.variables[name] = value
def getvar(self, name, default=None):
"""Return value of workflow variable for ``name`` or ``default``.
Args:
name (unicode): Variable name.
default (None, optional): Value to return if variable is unset.
Returns:
unicode or ``default``: Value of variable if set or ``default``.
"""
return self.variables.get(name, default)
@property
def obj(self):
"""Modifier formatted for JSON serialization for Alfred 3.
Returns:
dict: Modifier for serializing to JSON.
"""
o = {}
if self.subtitle is not None:
o['subtitle'] = self.subtitle
if self.arg is not None:
o['arg'] = self.arg
if self.valid is not None:
o['valid'] = self.valid
if self.variables:
o['variables'] = self.variables
if self.config:
o['config'] = self.config
icon = self._icon()
if icon:
o['icon'] = icon
return o
def _icon(self):
"""Return `icon` object for item.
Returns:
dict: Mapping for item `icon` (may be empty).
"""
icon = {}
if self.icon is not None:
icon['path'] = self.icon
if self.icontype is not None:
icon['type'] = self.icontype
return icon
class Item3(object):
"""Represents a feedback item for Alfred 3+.
Generates Alfred-compliant JSON for a single item.
Don't use this class directly (as it then won't be associated with
any :class:`Workflow3 <workflow.Workflow3>` object), but rather use
:meth:`Workflow3.add_item() <workflow.Workflow3.add_item>`.
See :meth:`~workflow.Workflow3.add_item` for details of arguments.
"""
def __init__(self, title, subtitle='', arg=None, autocomplete=None,
match=None, valid=False, uid=None, icon=None, icontype=None,
type=None, largetext=None, copytext=None, quicklookurl=None):
"""Create a new :class:`Item3` object.
Use same arguments as for
:class:`Workflow.Item <workflow.Workflow.Item>`.
Argument ``modifier_subtitles`` is not supported.
"""
self.title = title
self.subtitle = subtitle
self.arg = arg
self.autocomplete = autocomplete
self.match = match
self.valid = valid
self.uid = uid
self.icon = icon
self.icontype = icontype
self.type = type
self.quicklookurl = quicklookurl
self.largetext = largetext
self.copytext = copytext
self.modifiers = {}
self.config = {}
self.variables = {}
def setvar(self, name, value):
"""Set a workflow variable for this Item.
Args:
name (unicode): Name of variable.
value (unicode): Value of variable.
"""
self.variables[name] = value
def getvar(self, name, default=None):
"""Return value of workflow variable for ``name`` or ``default``.
Args:
name (unicode): Variable name.
default (None, optional): Value to return if variable is unset.
Returns:
unicode or ``default``: Value of variable if set or ``default``.
"""
return self.variables.get(name, default)
def add_modifier(self, key, subtitle=None, arg=None, valid=None, icon=None,
icontype=None):
"""Add alternative values for a modifier key.
Args:
key (unicode): Modifier key, e.g. ``"cmd"`` or ``"alt"``
subtitle (unicode, optional): Override item subtitle.
arg (unicode, optional): Input for following action.
valid (bool, optional): Override item validity.
icon (unicode, optional): Filepath/UTI of icon.
icontype (unicode, optional): Type of icon. See
:meth:`Workflow.add_item() <workflow.Workflow.add_item>`
for valid values.
In Alfred 4.1+ and Alfred-Workflow 1.40+, ``arg`` may also be a
:class:`list` or :class:`tuple`.
Returns:
Modifier: Configured :class:`Modifier`.
"""
mod = Modifier(key, subtitle, arg, valid, icon, icontype)
# Add Item variables to Modifier
mod.variables.update(self.variables)
self.modifiers[key] = mod
return mod
@property
def obj(self):
"""Item formatted for JSON serialization.
Returns:
dict: Data suitable for Alfred 3 feedback.
"""
# Required values
o = {
'title': self.title,
'subtitle': self.subtitle,
'valid': self.valid,
}
# Optional values
if self.arg is not None:
o['arg'] = self.arg
if self.autocomplete is not None:
o['autocomplete'] = self.autocomplete
if self.match is not None:
o['match'] = self.match
if self.uid is not None:
o['uid'] = self.uid
if self.type is not None:
o['type'] = self.type
if self.quicklookurl is not None:
o['quicklookurl'] = self.quicklookurl
if self.variables:
o['variables'] = self.variables
if self.config:
o['config'] = self.config
# Largetype and copytext
text = self._text()
if text:
o['text'] = text
icon = self._icon()
if icon:
o['icon'] = icon
# Modifiers
mods = self._modifiers()
if mods:
o['mods'] = mods
return o
def _icon(self):
"""Return `icon` object for item.
Returns:
dict: Mapping for item `icon` (may be empty).
"""
icon = {}
if self.icon is not None:
icon['path'] = self.icon
if self.icontype is not None:
icon['type'] = self.icontype
return icon
def _text(self):
"""Return `largetext` and `copytext` object for item.
Returns:
dict: `text` mapping (may be empty)
"""
text = {}
if self.largetext is not None:
text['largetype'] = self.largetext
if self.copytext is not None:
text['copy'] = self.copytext
return text
def _modifiers(self):
"""Build `mods` dictionary for JSON feedback.
Returns:
dict: Modifier mapping or `None`.
"""
if self.modifiers:
mods = {}
for k, mod in self.modifiers.items():
mods[k] = mod.obj
return mods
return None
class Workflow3(Workflow):
"""Workflow class that generates Alfred 3+ feedback.
It is a subclass of :class:`~workflow.Workflow` and most of its
methods are documented there.
Attributes:
item_class (class): Class used to generate feedback items.
variables (dict): Top level workflow variables.
"""
item_class = Item3
def __init__(self, **kwargs):
"""Create a new :class:`Workflow3` object.
See :class:`~workflow.Workflow` for documentation.
"""
Workflow.__init__(self, **kwargs)
self.variables = {}
self._rerun = 0
# Get session ID from environment if present
self._session_id = os.getenv('_WF_SESSION_ID') or None
if self._session_id:
self.setvar('_WF_SESSION_ID', self._session_id)
@property
def _default_cachedir(self):
"""Alfred 4's default cache directory."""
return os.path.join(
os.path.expanduser(
'~/Library/Caches/com.runningwithcrayons.Alfred/'
'Workflow Data/'),
self.bundleid)
@property
def _default_datadir(self):
"""Alfred 4's default data directory."""
return os.path.join(os.path.expanduser(
'~/Library/Application Support/Alfred/Workflow Data/'),
self.bundleid)
@property
def rerun(self):
"""How often (in seconds) Alfred should re-run the Script Filter."""
return self._rerun
@rerun.setter
def rerun(self, seconds):
"""Interval at which Alfred should re-run the Script Filter.
Args:
seconds (int): Interval between runs.
"""
self._rerun = seconds
@property
def session_id(self):
"""A unique session ID every time the user uses the workflow.
.. versionadded:: 1.25
The session ID persists while the user is using this workflow.
It expires when the user runs a different workflow or closes
Alfred.
"""
if not self._session_id:
from uuid import uuid4
self._session_id = uuid4().hex
self.setvar('_WF_SESSION_ID', self._session_id)
return self._session_id
def setvar(self, name, value, persist=False):
"""Set a "global" workflow variable.
.. versionchanged:: 1.33
These variables are always passed to downstream workflow objects.
If you have set :attr:`rerun`, these variables are also passed
back to the script when Alfred runs it again.
Args:
name (unicode): Name of variable.
value (unicode): Value of variable.
persist (bool, optional): Also save variable to ``info.plist``?
"""
self.variables[name] = value
if persist:
from .util import set_config
set_config(name, value, self.bundleid)
self.logger.debug('saved variable %r with value %r to info.plist',
name, value)
def getvar(self, name, default=None):
"""Return value of workflow variable for ``name`` or ``default``.
Args:
name (unicode): Variable name.
default (None, optional): Value to return if variable is unset.
Returns:
unicode or ``default``: Value of variable if set or ``default``.
"""
return self.variables.get(name, default)
def add_item(self, title, subtitle='', arg=None, autocomplete=None,
valid=False, uid=None, icon=None, icontype=None, type=None,
largetext=None, copytext=None, quicklookurl=None, match=None):
"""Add an item to be output to Alfred.
Args:
match (unicode, optional): If you have "Alfred filters results"
turned on for your Script Filter, Alfred (version 3.5 and
above) will filter against this field, not ``title``.
In Alfred 4.1+ and Alfred-Workflow 1.40+, ``arg`` may also be a
:class:`list` or :class:`tuple`.
See :meth:`Workflow.add_item() <workflow.Workflow.add_item>` for
the main documentation and other parameters.
The key difference is that this method does not support the
``modifier_subtitles`` argument. Use the :meth:`~Item3.add_modifier()`
method on the returned item instead.
Returns:
Item3: Alfred feedback item.
"""
item = self.item_class(title, subtitle, arg, autocomplete,
match, valid, uid, icon, icontype, type,
largetext, copytext, quicklookurl)
# Add variables to child item
item.variables.update(self.variables)
self._items.append(item)
return item
@property
def _session_prefix(self):
"""Filename prefix for current session."""
return '_wfsess-{0}-'.format(self.session_id)
def _mk_session_name(self, name):
"""New cache name/key based on session ID."""
return self._session_prefix + name
def cache_data(self, name, data, session=False):
"""Cache API with session-scoped expiry.
.. versionadded:: 1.25
Args:
name (str): Cache key
data (object): Data to cache
session (bool, optional): Whether to scope the cache
to the current session.
``name`` and ``data`` are the same as for the
:meth:`~workflow.Workflow.cache_data` method on
:class:`~workflow.Workflow`.
If ``session`` is ``True``, then ``name`` is prefixed
with :attr:`session_id`.
"""
if session:
name = self._mk_session_name(name)
return super(Workflow3, self).cache_data(name, data)
def cached_data(self, name, data_func=None, max_age=60, session=False):
"""Cache API with session-scoped expiry.
.. versionadded:: 1.25
Args:
name (str): Cache key
data_func (callable): Callable that returns fresh data. It
is called if the cache has expired or doesn't exist.
max_age (int): Maximum allowable age of cache in seconds.
session (bool, optional): Whether to scope the cache
to the current session.
``name``, ``data_func`` and ``max_age`` are the same as for the
:meth:`~workflow.Workflow.cached_data` method on
:class:`~workflow.Workflow`.
If ``session`` is ``True``, then ``name`` is prefixed
with :attr:`session_id`.
"""
if session:
name = self._mk_session_name(name)
return super(Workflow3, self).cached_data(name, data_func, max_age)
def clear_session_cache(self, current=False):
"""Remove session data from the cache.
.. versionadded:: 1.25
.. versionchanged:: 1.27
By default, data belonging to the current session won't be
deleted. Set ``current=True`` to also clear current session.
Args:
current (bool, optional): If ``True``, also remove data for
current session.
"""
def _is_session_file(filename):
if current:
return filename.startswith('_wfsess-')
return filename.startswith('_wfsess-') \
and not filename.startswith(self._session_prefix)
self.clear_cache(_is_session_file)
@property
def obj(self):
"""Feedback formatted for JSON serialization.
Returns:
dict: Data suitable for Alfred 3 feedback.
"""
items = []
for item in self._items:
items.append(item.obj)
o = {'items': items}
if self.variables:
o['variables'] = self.variables
if self.rerun:
o['rerun'] = self.rerun
return o
def warn_empty(self, title, subtitle=u'', icon=None):
"""Add a warning to feedback if there are no items.
.. versionadded:: 1.31
Add a "warning" item to Alfred feedback if no other items
have been added. This is a handy shortcut to prevent Alfred
from showing its fallback searches, which it does if no
items are returned.
Args:
title (unicode): Title of feedback item.
subtitle (unicode, optional): Subtitle of feedback item.
icon (str, optional): Icon for feedback item. If not
specified, ``ICON_WARNING`` is used.
Returns:
Item3: Newly-created item.
"""
if len(self._items):
return
icon = icon or ICON_WARNING
return self.add_item(title, subtitle, icon=icon)
def send_feedback(self):
"""Print stored items to console/Alfred as JSON."""
if self.debugging:
json.dump(self.obj, sys.stdout, indent=2, separators=(',', ': '))
else:
json.dump(self.obj, sys.stdout)
sys.stdout.flush()
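# --- Illustrative sketch (not part of the library) ------------------------
# The shape of a typical Script Filter built on Workflow3. The item names
# and variable values are invented; the Workflow3 calls are the ones
# defined above.
def _example_script_filter():
    wf = Workflow3()
    wf.rerun = 1  # ask Alfred to re-run this Script Filter every second
    wf.setvar('source', 'example')  # passed to downstream workflow objects
    for name in ('alpha', 'beta'):
        wf.add_item(name, subtitle='demo item', arg=name, valid=True)
    wf.warn_empty('Nothing to show')  # no-op here, as items were added
    wf.send_feedback()  # writes the JSON feedback document to stdout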
| Alfred-Workflow | /Alfred-Workflow-1.40.0.tar.gz/Alfred-Workflow-1.40.0/workflow/workflow3.py | workflow3.py |
# TODO: Exclude this module from test and code coverage in py2.6
"""
Post notifications via the macOS Notification Center.
This feature is only available on Mountain Lion (10.8) and later.
It will silently fail on older systems.
The main API is a single function, :func:`~workflow.notify.notify`.
It works by copying a simple application to your workflow's data
directory. It replaces the application's icon with your workflow's
icon and then calls the application to post notifications.
"""
from __future__ import print_function, unicode_literals
import os
import plistlib
import shutil
import subprocess
import sys
import tarfile
import tempfile
import uuid
import workflow
_wf = None
_log = None
#: Available system sounds from System Preferences > Sound > Sound Effects
SOUNDS = (
'Basso',
'Blow',
'Bottle',
'Frog',
'Funk',
'Glass',
'Hero',
'Morse',
'Ping',
'Pop',
'Purr',
'Sosumi',
'Submarine',
'Tink',
)
def wf():
"""Return Workflow object for this module.
Returns:
workflow.Workflow: Workflow object for current workflow.
"""
global _wf
if _wf is None:
_wf = workflow.Workflow()
return _wf
def log():
"""Return logger for this module.
Returns:
logging.Logger: Logger for this module.
"""
global _log
if _log is None:
_log = wf().logger
return _log
def notifier_program():
"""Return path to notifier applet executable.
Returns:
unicode: Path to Notify.app ``applet`` executable.
"""
return wf().datafile('Notify.app/Contents/MacOS/applet')
def notifier_icon_path():
"""Return path to icon file in installed Notify.app.
Returns:
unicode: Path to ``applet.icns`` within the app bundle.
"""
return wf().datafile('Notify.app/Contents/Resources/applet.icns')
def install_notifier():
"""Extract ``Notify.app`` from the workflow to data directory.
Changes the bundle ID of the installed app and gives it the
workflow's icon.
"""
archive = os.path.join(os.path.dirname(__file__), 'Notify.tgz')
destdir = wf().datadir
app_path = os.path.join(destdir, 'Notify.app')
n = notifier_program()
log().debug('installing Notify.app to %r ...', destdir)
# z = zipfile.ZipFile(archive, 'r')
# z.extractall(destdir)
tgz = tarfile.open(archive, 'r:gz')
tgz.extractall(destdir)
if not os.path.exists(n): # pragma: nocover
raise RuntimeError('Notify.app could not be installed in ' + destdir)
# Replace applet icon
icon = notifier_icon_path()
workflow_icon = wf().workflowfile('icon.png')
if os.path.exists(icon):
os.unlink(icon)
png_to_icns(workflow_icon, icon)
# Set file icon
# PyObjC isn't available for 2.6, so this is 2.7 only. Actually,
# none of this code will "work" on pre-10.8 systems. Let it run
# until I figure out a better way of excluding this module
# from coverage in py2.6.
if sys.version_info >= (2, 7): # pragma: no cover
from AppKit import NSWorkspace, NSImage
ws = NSWorkspace.sharedWorkspace()
img = NSImage.alloc().init()
img.initWithContentsOfFile_(icon)
ws.setIcon_forFile_options_(img, app_path, 0)
# Change bundle ID of installed app
ip_path = os.path.join(app_path, 'Contents/Info.plist')
bundle_id = '{0}.{1}'.format(wf().bundleid, uuid.uuid4().hex)
data = plistlib.readPlist(ip_path)
log().debug('changing bundle ID to %r', bundle_id)
data['CFBundleIdentifier'] = bundle_id
plistlib.writePlist(data, ip_path)
def validate_sound(sound):
"""Coerce ``sound`` to valid sound name.
Returns ``None`` for invalid sounds. Sound names can be found
in ``System Preferences > Sound > Sound Effects``.
Args:
sound (str): Name of system sound.
Returns:
str: Proper name of sound or ``None``.
"""
if not sound:
return None
# Case-insensitive comparison of `sound`
if sound.lower() in [s.lower() for s in SOUNDS]:
# Title-case is correct for all system sounds as of macOS 10.11
return sound.title()
return None
def notify(title='', text='', sound=None):
"""Post notification via Notify.app helper.
Args:
title (str, optional): Notification title.
text (str, optional): Notification body text.
sound (str, optional): Name of sound to play.
Raises:
ValueError: Raised if both ``title`` and ``text`` are empty.
Returns:
bool: ``True`` if notification was posted, else ``False``.
"""
if title == text == '':
raise ValueError('Empty notification')
sound = validate_sound(sound) or ''
n = notifier_program()
if not os.path.exists(n):
install_notifier()
env = os.environ.copy()
enc = 'utf-8'
env['NOTIFY_TITLE'] = title.encode(enc)
env['NOTIFY_MESSAGE'] = text.encode(enc)
env['NOTIFY_SOUND'] = sound.encode(enc)
cmd = [n]
retcode = subprocess.call(cmd, env=env)
if retcode == 0:
return True
log().error('Notify.app exited with status {0}.'.format(retcode))
return False
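# --- Illustrative sketch (not part of the library) ------------------------
# Typical use of notify() from a workflow script. The title and text are
# invented; 'Ping' is one of the names in SOUNDS above, and unknown sound
# names are silently dropped by validate_sound().
def _example_notify():
    ok = notify('Task finished', 'All 3 files were processed', sound='Ping')
    if not ok:
        log().error('could not post notification')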
def convert_image(inpath, outpath, size):
"""Convert an image file using ``sips``.
Args:
inpath (str): Path of source file.
outpath (str): Path to destination file.
size (int): Width and height of destination image in pixels.
Raises:
RuntimeError: Raised if ``sips`` exits with non-zero status.
"""
cmd = [
b'sips',
b'-z', str(size), str(size),
inpath,
b'--out', outpath]
# log().debug(cmd)
with open(os.devnull, 'w') as pipe:
retcode = subprocess.call(cmd, stdout=pipe, stderr=subprocess.STDOUT)
if retcode != 0:
raise RuntimeError('sips exited with %d' % retcode)
def png_to_icns(png_path, icns_path):
"""Convert PNG file to ICNS using ``iconutil``.
Create an iconset from the source PNG file. Generate PNG files
in each size required by macOS, then call ``iconutil`` to turn
them into a single ICNS file.
Args:
png_path (str): Path to source PNG file.
icns_path (str): Path to destination ICNS file.
Raises:
RuntimeError: Raised if ``iconutil`` or ``sips`` fail.
"""
tempdir = tempfile.mkdtemp(prefix='aw-', dir=wf().datadir)
try:
iconset = os.path.join(tempdir, 'Icon.iconset')
if os.path.exists(iconset): # pragma: nocover
raise RuntimeError('iconset already exists: ' + iconset)
os.makedirs(iconset)
# Copy source icon to icon set and generate all the other
# sizes needed
configs = []
for i in (16, 32, 128, 256, 512):
configs.append(('icon_{0}x{0}.png'.format(i), i))
configs.append(('icon_{0}x{0}@2x.png'.format(i), i * 2))
shutil.copy(png_path, os.path.join(iconset, 'icon_256x256.png'))
shutil.copy(png_path, os.path.join(iconset, '[email protected]'))
for name, size in configs:
outpath = os.path.join(iconset, name)
if os.path.exists(outpath):
continue
convert_image(png_path, outpath, size)
cmd = [
b'iconutil',
b'-c', b'icns',
b'-o', icns_path,
iconset]
retcode = subprocess.call(cmd)
if retcode != 0:
raise RuntimeError('iconutil exited with %d' % retcode)
if not os.path.exists(icns_path): # pragma: nocover
raise ValueError(
'generated ICNS file not found: ' + repr(icns_path))
finally:
try:
shutil.rmtree(tempdir)
except OSError: # pragma: no cover
pass
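# --- Illustrative sketch (not part of the library) ------------------------
# Converting a workflow's PNG icon to ICNS by hand. The filenames are
# invented; both helpers shell out to the macOS tools sips and iconutil,
# so this only works on macOS.
def _example_icon_conversion():
    src = wf().workflowfile('icon.png')
    dest = wf().datafile('icon.icns')
    png_to_icns(src, dest)
    return dest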
if __name__ == '__main__': # pragma: nocover
# Simple command-line script to test module with
# This won't work on 2.6, as `argparse` isn't available
# by default.
import argparse
from unicodedata import normalize
def ustr(s):
"""Coerce `s` to normalised Unicode."""
return normalize('NFD', s.decode('utf-8'))
p = argparse.ArgumentParser()
p.add_argument('-p', '--png', help="PNG image to convert to ICNS.")
p.add_argument('-l', '--list-sounds', help="Show available sounds.",
action='store_true')
p.add_argument('-t', '--title',
help="Notification title.", type=ustr,
default='')
p.add_argument('-s', '--sound', type=ustr,
help="Optional notification sound.", default='')
p.add_argument('text', type=ustr,
help="Notification body text.", default='', nargs='?')
o = p.parse_args()
# List available sounds
if o.list_sounds:
for sound in SOUNDS:
print(sound)
sys.exit(0)
# Convert PNG to ICNS
if o.png:
icns = os.path.join(
os.path.dirname(o.png),
os.path.splitext(os.path.basename(o.png))[0] + '.icns')
print('converting {0!r} to {1!r} ...'.format(o.png, icns),
file=sys.stderr)
if os.path.exists(icns):
raise ValueError('destination file already exists: ' + icns)
png_to_icns(o.png, icns)
sys.exit(0)
# Post notification
if o.title == o.text == '':
print('ERROR: empty notification.', file=sys.stderr)
sys.exit(1)
else:
notify(o.title, o.text, o.sound)
| Alfred-Workflow | /Alfred-Workflow-1.40.0.tar.gz/Alfred-Workflow-1.40.0/workflow/notify.py | notify.py |
from __future__ import print_function, unicode_literals
import signal
import sys
import os
import subprocess
import pickle
from workflow import Workflow
__all__ = ['is_running', 'run_in_background']
_wf = None
def wf():
global _wf
if _wf is None:
_wf = Workflow()
return _wf
def _log():
return wf().logger
def _arg_cache(name):
"""Return path to pickle cache file for arguments.
:param name: name of task
:type name: ``unicode``
:returns: Path to cache file
:rtype: ``unicode`` filepath
"""
return wf().cachefile(name + '.argcache')
def _pid_file(name):
"""Return path to PID file for ``name``.
:param name: name of task
:type name: ``unicode``
:returns: Path to PID file for task
:rtype: ``unicode`` filepath
"""
return wf().cachefile(name + '.pid')
def _process_exists(pid):
"""Check if a process with PID ``pid`` exists.
:param pid: PID to check
:type pid: ``int``
:returns: ``True`` if process exists, else ``False``
:rtype: ``Boolean``
"""
try:
os.kill(pid, 0)
except OSError: # not running
return False
return True
def _job_pid(name):
"""Get PID of job or `None` if job does not exist.
Args:
name (str): Name of job.
Returns:
int: PID of job process (or `None` if job doesn't exist).
"""
pidfile = _pid_file(name)
if not os.path.exists(pidfile):
return
with open(pidfile, 'rb') as fp:
pid = int(fp.read())
if _process_exists(pid):
return pid
os.unlink(pidfile)
def is_running(name):
"""Test whether task ``name`` is currently running.
:param name: name of task
:type name: unicode
:returns: ``True`` if task with name ``name`` is running, else ``False``
:rtype: bool
"""
if _job_pid(name) is not None:
return True
return False
def _background(pidfile, stdin='/dev/null', stdout='/dev/null',
stderr='/dev/null'): # pragma: no cover
"""Fork the current process into a background daemon.
:param pidfile: file to write PID of daemon process to.
:type pidfile: filepath
:param stdin: where to read input
:type stdin: filepath
:param stdout: where to write stdout output
:type stdout: filepath
:param stderr: where to write stderr output
:type stderr: filepath
"""
def _fork_and_exit_parent(errmsg, wait=False, write=False):
try:
pid = os.fork()
if pid > 0:
if write: # write PID of child process to `pidfile`
tmp = pidfile + '.tmp'
with open(tmp, 'wb') as fp:
fp.write(str(pid))
os.rename(tmp, pidfile)
if wait: # wait for child process to exit
os.waitpid(pid, 0)
os._exit(0)
except OSError as err:
_log().critical('%s: (%d) %s', errmsg, err.errno, err.strerror)
raise err
# Do first fork and wait for second fork to finish.
_fork_and_exit_parent('fork #1 failed', wait=True)
# Decouple from parent environment.
os.chdir(wf().workflowdir)
os.setsid()
# Do second fork and write PID to pidfile.
_fork_and_exit_parent('fork #2 failed', write=True)
# Now I am a daemon!
# Redirect standard file descriptors.
si = open(stdin, 'r', 0)
so = open(stdout, 'a+', 0)
se = open(stderr, 'a+', 0)
if hasattr(sys.stdin, 'fileno'):
os.dup2(si.fileno(), sys.stdin.fileno())
if hasattr(sys.stdout, 'fileno'):
os.dup2(so.fileno(), sys.stdout.fileno())
if hasattr(sys.stderr, 'fileno'):
os.dup2(se.fileno(), sys.stderr.fileno())
def kill(name, sig=signal.SIGTERM):
"""Send a signal to job ``name`` via :func:`os.kill`.
.. versionadded:: 1.29
Args:
name (str): Name of the job
sig (int, optional): Signal to send (default: SIGTERM)
Returns:
bool: `False` if job isn't running, `True` if signal was sent.
"""
pid = _job_pid(name)
if pid is None:
return False
os.kill(pid, sig)
return True
def run_in_background(name, args, **kwargs):
r"""Cache arguments then call this script again via :func:`subprocess.call`.
:param name: name of job
:type name: unicode
:param args: arguments passed as first argument to :func:`subprocess.call`
:param \**kwargs: keyword arguments to :func:`subprocess.call`
:returns: exit code of sub-process
:rtype: int
When you call this function, it caches its arguments and then calls
``background.py`` in a subprocess. The Python subprocess will load the
cached arguments, fork into the background, and then run the command you
specified.
This function will return as soon as the ``background.py`` subprocess has
forked, returning the exit code of *that* process (i.e. not of the command
you're trying to run).
If that process fails, an error will be written to the log file.
If a process is already running under the same name, this function will
return immediately and will not run the specified command.
"""
if is_running(name):
_log().info('[%s] job already running', name)
return
argcache = _arg_cache(name)
# Cache arguments
with open(argcache, 'wb') as fp:
pickle.dump({'args': args, 'kwargs': kwargs}, fp)
_log().debug('[%s] command cached: %s', name, argcache)
# Call this script
cmd = ['/usr/bin/python', __file__, name]
_log().debug('[%s] passing job to background runner: %r', name, cmd)
retcode = subprocess.call(cmd)
if retcode: # pragma: no cover
_log().error('[%s] background runner failed with %d', name, retcode)
else:
_log().debug('[%s] background job started', name)
return retcode
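# --- Illustrative sketch (not part of the library) ------------------------
# The usual pattern from a Script Filter: start a long-running update in
# the background unless one is already underway. The job name and command
# are invented for illustration.
def _example_background_update():
    if not is_running('update'):
        run_in_background('update', ['/usr/bin/python', 'update.py'])
    else:
        _log().info('[update] job already running')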
def main(wf): # pragma: no cover
"""Run command in a background process.
Load cached arguments, fork into background, then call
:meth:`subprocess.call` with cached arguments.
"""
log = wf.logger
name = wf.args[0]
argcache = _arg_cache(name)
if not os.path.exists(argcache):
msg = '[{0}] command cache not found: {1}'.format(name, argcache)
log.critical(msg)
raise IOError(msg)
# Fork to background and run command
pidfile = _pid_file(name)
_background(pidfile)
# Load cached arguments
with open(argcache, 'rb') as fp:
data = pickle.load(fp)
# Cached arguments
args = data['args']
kwargs = data['kwargs']
# Delete argument cache file
os.unlink(argcache)
try:
# Run the command
log.debug('[%s] running command: %r', name, args)
retcode = subprocess.call(args, **kwargs)
if retcode:
log.error('[%s] command failed with status %d', name, retcode)
finally:
os.unlink(pidfile)
log.debug('[%s] job complete', name)
if __name__ == '__main__': # pragma: no cover
wf().run(main)
| Alfred-Workflow | /Alfred-Workflow-1.40.0.tar.gz/Alfred-Workflow-1.40.0/workflow/background.py | background.py |
from __future__ import print_function, unicode_literals
import binascii
import cPickle
from copy import deepcopy
import json
import logging
import logging.handlers
import os
import pickle
import plistlib
import re
import shutil
import string
import subprocess
import sys
import time
import unicodedata
try:
import xml.etree.cElementTree as ET
except ImportError: # pragma: no cover
import xml.etree.ElementTree as ET
# imported to maintain API
from util import AcquisitionError # noqa: F401
from util import (
atomic_writer,
LockFile,
uninterruptible,
)
#: Sentinel for properties that haven't been set yet (that might
#: correctly have the value ``None``)
UNSET = object()
####################################################################
# Standard system icons
####################################################################
# These icons are default macOS icons. They are super-high quality, and
# will be familiar to users.
# This library uses `ICON_ERROR` when a workflow dies in flames, so
# in my own workflows, I use `ICON_WARNING` for less fatal errors
# (e.g. bad user input, no results etc.)
# The system icons are all in this directory. There are many more than
# are listed here
ICON_ROOT = '/System/Library/CoreServices/CoreTypes.bundle/Contents/Resources'
ICON_ACCOUNT = os.path.join(ICON_ROOT, 'Accounts.icns')
ICON_BURN = os.path.join(ICON_ROOT, 'BurningIcon.icns')
ICON_CLOCK = os.path.join(ICON_ROOT, 'Clock.icns')
ICON_COLOR = os.path.join(ICON_ROOT, 'ProfileBackgroundColor.icns')
ICON_COLOUR = ICON_COLOR # Queen's English, if you please
ICON_EJECT = os.path.join(ICON_ROOT, 'EjectMediaIcon.icns')
# Shown when a workflow throws an error
ICON_ERROR = os.path.join(ICON_ROOT, 'AlertStopIcon.icns')
ICON_FAVORITE = os.path.join(ICON_ROOT, 'ToolbarFavoritesIcon.icns')
ICON_FAVOURITE = ICON_FAVORITE
ICON_GROUP = os.path.join(ICON_ROOT, 'GroupIcon.icns')
ICON_HELP = os.path.join(ICON_ROOT, 'HelpIcon.icns')
ICON_HOME = os.path.join(ICON_ROOT, 'HomeFolderIcon.icns')
ICON_INFO = os.path.join(ICON_ROOT, 'ToolbarInfo.icns')
ICON_NETWORK = os.path.join(ICON_ROOT, 'GenericNetworkIcon.icns')
ICON_NOTE = os.path.join(ICON_ROOT, 'AlertNoteIcon.icns')
ICON_SETTINGS = os.path.join(ICON_ROOT, 'ToolbarAdvanced.icns')
ICON_SWIRL = os.path.join(ICON_ROOT, 'ErasingIcon.icns')
ICON_SWITCH = os.path.join(ICON_ROOT, 'General.icns')
ICON_SYNC = os.path.join(ICON_ROOT, 'Sync.icns')
ICON_TRASH = os.path.join(ICON_ROOT, 'TrashIcon.icns')
ICON_USER = os.path.join(ICON_ROOT, 'UserIcon.icns')
ICON_WARNING = os.path.join(ICON_ROOT, 'AlertCautionIcon.icns')
ICON_WEB = os.path.join(ICON_ROOT, 'BookmarkIcon.icns')
####################################################################
# non-ASCII to ASCII diacritic folding.
# Used by `fold_to_ascii` method
####################################################################
ASCII_REPLACEMENTS = {
'À': 'A',
'Á': 'A',
'Â': 'A',
'Ã': 'A',
'Ä': 'A',
'Å': 'A',
'Æ': 'AE',
'Ç': 'C',
'È': 'E',
'É': 'E',
'Ê': 'E',
'Ë': 'E',
'Ì': 'I',
'Í': 'I',
'Î': 'I',
'Ï': 'I',
'Ð': 'D',
'Ñ': 'N',
'Ò': 'O',
'Ó': 'O',
'Ô': 'O',
'Õ': 'O',
'Ö': 'O',
'Ø': 'O',
'Ù': 'U',
'Ú': 'U',
'Û': 'U',
'Ü': 'U',
'Ý': 'Y',
'Þ': 'Th',
'ß': 'ss',
'à': 'a',
'á': 'a',
'â': 'a',
'ã': 'a',
'ä': 'a',
'å': 'a',
'æ': 'ae',
'ç': 'c',
'è': 'e',
'é': 'e',
'ê': 'e',
'ë': 'e',
'ì': 'i',
'í': 'i',
'î': 'i',
'ï': 'i',
'ð': 'd',
'ñ': 'n',
'ò': 'o',
'ó': 'o',
'ô': 'o',
'õ': 'o',
'ö': 'o',
'ø': 'o',
'ù': 'u',
'ú': 'u',
'û': 'u',
'ü': 'u',
'ý': 'y',
'þ': 'th',
'ÿ': 'y',
'Ł': 'L',
'ł': 'l',
'Ń': 'N',
'ń': 'n',
'Ņ': 'N',
'ņ': 'n',
'Ň': 'N',
'ň': 'n',
'Ŋ': 'NG',
'ŋ': 'ng',
'Ō': 'O',
'ō': 'o',
'Ŏ': 'O',
'ŏ': 'o',
'Ő': 'O',
'ő': 'o',
'Œ': 'OE',
'œ': 'oe',
'Ŕ': 'R',
'ŕ': 'r',
'Ŗ': 'R',
'ŗ': 'r',
'Ř': 'R',
'ř': 'r',
'Ś': 'S',
'ś': 's',
'Ŝ': 'S',
'ŝ': 's',
'Ş': 'S',
'ş': 's',
'Š': 'S',
'š': 's',
'Ţ': 'T',
'ţ': 't',
'Ť': 'T',
'ť': 't',
'Ŧ': 'T',
'ŧ': 't',
'Ũ': 'U',
'ũ': 'u',
'Ū': 'U',
'ū': 'u',
'Ŭ': 'U',
'ŭ': 'u',
'Ů': 'U',
'ů': 'u',
'Ű': 'U',
'ű': 'u',
'Ŵ': 'W',
'ŵ': 'w',
'Ŷ': 'Y',
'ŷ': 'y',
'Ÿ': 'Y',
'Ź': 'Z',
'ź': 'z',
'Ż': 'Z',
'ż': 'z',
'Ž': 'Z',
'ž': 'z',
'ſ': 's',
'Α': 'A',
'Β': 'B',
'Γ': 'G',
'Δ': 'D',
'Ε': 'E',
'Ζ': 'Z',
'Η': 'E',
'Θ': 'Th',
'Ι': 'I',
'Κ': 'K',
'Λ': 'L',
'Μ': 'M',
'Ν': 'N',
'Ξ': 'Ks',
'Ο': 'O',
'Π': 'P',
'Ρ': 'R',
'Σ': 'S',
'Τ': 'T',
'Υ': 'U',
'Φ': 'Ph',
'Χ': 'Kh',
'Ψ': 'Ps',
'Ω': 'O',
'α': 'a',
'β': 'b',
'γ': 'g',
'δ': 'd',
'ε': 'e',
'ζ': 'z',
'η': 'e',
'θ': 'th',
'ι': 'i',
'κ': 'k',
'λ': 'l',
'μ': 'm',
'ν': 'n',
'ξ': 'x',
'ο': 'o',
'π': 'p',
'ρ': 'r',
'ς': 's',
'σ': 's',
'τ': 't',
'υ': 'u',
'φ': 'ph',
'χ': 'kh',
'ψ': 'ps',
'ω': 'o',
'А': 'A',
'Б': 'B',
'В': 'V',
'Г': 'G',
'Д': 'D',
'Е': 'E',
'Ж': 'Zh',
'З': 'Z',
'И': 'I',
'Й': 'I',
'К': 'K',
'Л': 'L',
'М': 'M',
'Н': 'N',
'О': 'O',
'П': 'P',
'Р': 'R',
'С': 'S',
'Т': 'T',
'У': 'U',
'Ф': 'F',
'Х': 'Kh',
'Ц': 'Ts',
'Ч': 'Ch',
'Ш': 'Sh',
'Щ': 'Shch',
'Ъ': "'",
'Ы': 'Y',
'Ь': "'",
'Э': 'E',
'Ю': 'Iu',
'Я': 'Ia',
'а': 'a',
'б': 'b',
'в': 'v',
'г': 'g',
'д': 'd',
'е': 'e',
'ж': 'zh',
'з': 'z',
'и': 'i',
'й': 'i',
'к': 'k',
'л': 'l',
'м': 'm',
'н': 'n',
'о': 'o',
'п': 'p',
'р': 'r',
'с': 's',
'т': 't',
'у': 'u',
'ф': 'f',
'х': 'kh',
'ц': 'ts',
'ч': 'ch',
'ш': 'sh',
'щ': 'shch',
'ъ': "'",
'ы': 'y',
'ь': "'",
'э': 'e',
'ю': 'iu',
'я': 'ia',
# 'ᴀ': '',
# 'ᴁ': '',
# 'ᴂ': '',
# 'ᴃ': '',
# 'ᴄ': '',
# 'ᴅ': '',
# 'ᴆ': '',
# 'ᴇ': '',
# 'ᴈ': '',
# 'ᴉ': '',
# 'ᴊ': '',
# 'ᴋ': '',
# 'ᴌ': '',
# 'ᴍ': '',
# 'ᴎ': '',
# 'ᴏ': '',
# 'ᴐ': '',
# 'ᴑ': '',
# 'ᴒ': '',
# 'ᴓ': '',
# 'ᴔ': '',
# 'ᴕ': '',
# 'ᴖ': '',
# 'ᴗ': '',
# 'ᴘ': '',
# 'ᴙ': '',
# 'ᴚ': '',
# 'ᴛ': '',
# 'ᴜ': '',
# 'ᴝ': '',
# 'ᴞ': '',
# 'ᴟ': '',
# 'ᴠ': '',
# 'ᴡ': '',
# 'ᴢ': '',
# 'ᴣ': '',
# 'ᴤ': '',
# 'ᴥ': '',
'ᴦ': 'G',
'ᴧ': 'L',
'ᴨ': 'P',
'ᴩ': 'R',
'ᴪ': 'PS',
'ẞ': 'Ss',
'Ỳ': 'Y',
'ỳ': 'y',
'Ỵ': 'Y',
'ỵ': 'y',
'Ỹ': 'Y',
'ỹ': 'y',
}
####################################################################
# Smart-to-dumb punctuation mapping
####################################################################
DUMB_PUNCTUATION = {
'‘': "'",
'’': "'",
'‚': "'",
'“': '"',
'”': '"',
'„': '"',
'–': '-',
'—': '-'
}
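# --- Illustrative sketch (not part of the library) ------------------------
# One way these mapping tables are applied: per-character substitution,
# leaving unmapped characters untouched. The library's own folding lives
# on Workflow (see the `fold_to_ascii` method mentioned above); this
# standalone version is just a sketch of the idea.
def _example_fold(text):
    folded = ''.join(ASCII_REPLACEMENTS.get(ch, ch) for ch in text)
    return ''.join(DUMB_PUNCTUATION.get(ch, ch) for ch in folded)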
####################################################################
# Used by `Workflow.filter`
####################################################################
# Anchor characters in a name
#: Characters that indicate the beginning of a "word" in CamelCase
INITIALS = string.ascii_uppercase + string.digits
#: Split on non-letters, numbers
split_on_delimiters = re.compile('[^a-zA-Z0-9]').split
# Match filter flags
#: Match items that start with ``query``
MATCH_STARTSWITH = 1
#: Match items whose capital letters start with ``query``
MATCH_CAPITALS = 2
#: Match items with a component "word" that matches ``query``
MATCH_ATOM = 4
#: Match items whose initials (based on atoms) start with ``query``
MATCH_INITIALS_STARTSWITH = 8
#: Match items whose initials (based on atoms) contain ``query``
MATCH_INITIALS_CONTAIN = 16
#: Combination of :const:`MATCH_INITIALS_STARTSWITH` and
#: :const:`MATCH_INITIALS_CONTAIN`
MATCH_INITIALS = 24
#: Match items if ``query`` is a substring
MATCH_SUBSTRING = 32
#: Match items if all characters in ``query`` appear in the item in order
MATCH_ALLCHARS = 64
#: Combination of all other ``MATCH_*`` constants
MATCH_ALL = 127
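# --- Illustrative sketch (not part of the library) ------------------------
# The MATCH_* values are bit flags: MATCH_ALL is all of them OR'd together,
# and custom rule sets are built the same way. The combination below is
# invented, e.g. for use as Workflow.filter()'s match_on argument.
_EXAMPLE_MATCH_STRICT = MATCH_STARTSWITH | MATCH_CAPITALS | MATCH_ATOM  # == 7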
####################################################################
# Used by `Workflow.check_update`
####################################################################
# Number of days to wait between checking for updates to the workflow
DEFAULT_UPDATE_FREQUENCY = 1
####################################################################
# Keychain access errors
####################################################################
class KeychainError(Exception):
"""Raised for unknown Keychain errors.
Raised by methods :meth:`Workflow.save_password`,
:meth:`Workflow.get_password` and :meth:`Workflow.delete_password`
when ``security`` CLI app returns an unknown error code.
"""
class PasswordNotFound(KeychainError):
"""Password not in Keychain.
Raised by method :meth:`Workflow.get_password` when ``account``
is unknown to the Keychain.
"""
class PasswordExists(KeychainError):
"""Raised when trying to overwrite an existing account password.
You should never receive this error: it is used internally
by the :meth:`Workflow.save_password` method to know if it needs
to delete the old password first (a Keychain implementation detail).
"""
####################################################################
# Helper functions
####################################################################
def isascii(text):
"""Test if ``text`` contains only ASCII characters.
:param text: text to test for ASCII-ness
:type text: ``unicode``
:returns: ``True`` if ``text`` contains only ASCII characters
:rtype: ``Boolean``
"""
try:
text.encode('ascii')
except UnicodeEncodeError:
return False
return True
####################################################################
# Implementation classes
####################################################################
class SerializerManager(object):
"""Contains registered serializers.
.. versionadded:: 1.8
A configured instance of this class is available at
:attr:`workflow.manager`.
Use :meth:`register()` to register new (or replace
existing) serializers, which you can specify by name when calling
:class:`~workflow.Workflow` data storage methods.
See :ref:`guide-serialization` and :ref:`guide-persistent-data`
for further information.
"""
def __init__(self):
"""Create new SerializerManager object."""
self._serializers = {}
def register(self, name, serializer):
"""Register ``serializer`` object under ``name``.
Raises :class:`AttributeError` if ``serializer`` is invalid.
.. note::
``name`` will be used as the file extension of the saved files.
:param name: Name to register ``serializer`` under
:type name: ``unicode`` or ``str``
:param serializer: object with ``load()`` and ``dump()``
methods
"""
# Basic validation
getattr(serializer, 'load')
getattr(serializer, 'dump')
self._serializers[name] = serializer
def serializer(self, name):
"""Return serializer object for ``name``.
:param name: Name of serializer to return
:type name: ``unicode`` or ``str``
:returns: serializer object or ``None`` if no such serializer
is registered.
"""
return self._serializers.get(name)
def unregister(self, name):
"""Remove registered serializer with ``name``.
Raises a :class:`ValueError` if there is no such registered
serializer.
:param name: Name of serializer to remove
:type name: ``unicode`` or ``str``
:returns: serializer object
"""
if name not in self._serializers:
raise ValueError('No such serializer registered : {0}'.format(
name))
serializer = self._serializers[name]
del self._serializers[name]
return serializer
@property
def serializers(self):
"""Return names of registered serializers."""
return sorted(self._serializers.keys())
class JSONSerializer(object):
"""Wrapper around :mod:`json`. Sets ``indent`` and ``encoding``.
.. versionadded:: 1.8
Use this serializer if you need readable data files. JSON doesn't
support Python objects as well as ``cPickle``/``pickle``, so be
careful which data you try to serialize as JSON.
"""
@classmethod
def load(cls, file_obj):
"""Load serialized object from open JSON file.
.. versionadded:: 1.8
:param file_obj: file handle
:type file_obj: ``file`` object
:returns: object loaded from JSON file
:rtype: object
"""
return json.load(file_obj)
@classmethod
def dump(cls, obj, file_obj):
"""Serialize object ``obj`` to open JSON file.
.. versionadded:: 1.8
:param obj: Python object to serialize
:type obj: JSON-serializable data structure
:param file_obj: file handle
:type file_obj: ``file`` object
"""
return json.dump(obj, file_obj, indent=2, encoding='utf-8')
class CPickleSerializer(object):
"""Wrapper around :mod:`cPickle`. Sets ``protocol``.
.. versionadded:: 1.8
This is the default serializer and the best combination of speed and
flexibility.
"""
@classmethod
def load(cls, file_obj):
"""Load serialized object from open pickle file.
.. versionadded:: 1.8
:param file_obj: file handle
:type file_obj: ``file`` object
:returns: object loaded from pickle file
:rtype: object
"""
return cPickle.load(file_obj)
@classmethod
def dump(cls, obj, file_obj):
"""Serialize object ``obj`` to open pickle file.
.. versionadded:: 1.8
:param obj: Python object to serialize
:type obj: Python object
:param file_obj: file handle
:type file_obj: ``file`` object
"""
return cPickle.dump(obj, file_obj, protocol=-1)
class PickleSerializer(object):
"""Wrapper around :mod:`pickle`. Sets ``protocol``.
.. versionadded:: 1.8
Use this serializer if you need to add custom pickling.
"""
@classmethod
def load(cls, file_obj):
"""Load serialized object from open pickle file.
.. versionadded:: 1.8
:param file_obj: file handle
:type file_obj: ``file`` object
:returns: object loaded from pickle file
:rtype: object
"""
return pickle.load(file_obj)
@classmethod
def dump(cls, obj, file_obj):
"""Serialize object ``obj`` to open pickle file.
.. versionadded:: 1.8
:param obj: Python object to serialize
:type obj: Python object
:param file_obj: file handle
:type file_obj: ``file`` object
"""
return pickle.dump(obj, file_obj, protocol=-1)
# Set up default manager and register built-in serializers
manager = SerializerManager()
manager.register('cpickle', CPickleSerializer)
manager.register('pickle', PickleSerializer)
manager.register('json', JSONSerializer)
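# --- Illustrative sketch (not part of the library) ------------------------
# Registering a custom serializer: any object with load(file_obj) and
# dump(obj, file_obj) methods is accepted. The toy serializer below
# round-trips objects through repr()/eval() -- invented purely for
# illustration, and unsafe for untrusted data.
class _ReprSerializer(object):
    @classmethod
    def load(cls, file_obj):
        return eval(file_obj.read())  # toy example only

    @classmethod
    def dump(cls, obj, file_obj):
        file_obj.write(repr(obj))

# manager.register('repr', _ReprSerializer)
# ...after which e.g. Workflow.store_data(name, data, serializer='repr')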
class Item(object):
"""Represents a feedback item for Alfred.
Generates Alfred-compliant XML for a single item.
You probably shouldn't use this class directly, but via
:meth:`Workflow.add_item`. See :meth:`~Workflow.add_item`
for details of arguments.
"""
def __init__(self, title, subtitle='', modifier_subtitles=None,
arg=None, autocomplete=None, valid=False, uid=None,
icon=None, icontype=None, type=None, largetext=None,
copytext=None, quicklookurl=None):
"""Same arguments as :meth:`Workflow.add_item`."""
self.title = title
self.subtitle = subtitle
self.modifier_subtitles = modifier_subtitles or {}
self.arg = arg
self.autocomplete = autocomplete
self.valid = valid
self.uid = uid
self.icon = icon
self.icontype = icontype
self.type = type
self.largetext = largetext
self.copytext = copytext
self.quicklookurl = quicklookurl
@property
def elem(self):
"""Create and return feedback item for Alfred.
:returns: :class:`ElementTree.Element <xml.etree.ElementTree.Element>`
instance for this :class:`Item` instance.
"""
# Attributes on <item> element
attr = {}
if self.valid:
attr['valid'] = 'yes'
else:
attr['valid'] = 'no'
# Allow empty string for autocomplete. This is a useful value,
# as TABing the result will revert the query back to just the
# keyword
if self.autocomplete is not None:
attr['autocomplete'] = self.autocomplete
# Optional attributes
for name in ('uid', 'type'):
value = getattr(self, name, None)
if value:
attr[name] = value
root = ET.Element('item', attr)
ET.SubElement(root, 'title').text = self.title
ET.SubElement(root, 'subtitle').text = self.subtitle
# Add modifier subtitles
for mod in ('cmd', 'ctrl', 'alt', 'shift', 'fn'):
if mod in self.modifier_subtitles:
ET.SubElement(root, 'subtitle',
{'mod': mod}).text = self.modifier_subtitles[mod]
# Add arg as element instead of attribute on <item>, as it's more
# flexible (newlines aren't allowed in attributes)
if self.arg:
ET.SubElement(root, 'arg').text = self.arg
# Add icon if there is one
if self.icon:
if self.icontype:
attr = dict(type=self.icontype)
else:
attr = {}
ET.SubElement(root, 'icon', attr).text = self.icon
if self.largetext:
ET.SubElement(root, 'text',
{'type': 'largetype'}).text = self.largetext
if self.copytext:
ET.SubElement(root, 'text',
{'type': 'copy'}).text = self.copytext
if self.quicklookurl:
ET.SubElement(root, 'quicklookurl').text = self.quicklookurl
return root
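# --- Illustrative sketch (not part of the library) ------------------------
# Rendering a single legacy (Alfred 2) XML item. Workflow.send_feedback()
# does this for the whole item list; the one-off below is invented for
# illustration.
def _example_item_xml():
    item = Item('Hello', subtitle='world', arg='hello', valid=True)
    return ET.tostring(item.elem)  # '<item valid="yes">...</item>'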
class Settings(dict):
"""A dictionary that saves itself when changed.
Dictionary keys & values will be saved as a JSON file
at ``filepath``. If the file does not exist, the dictionary
(and settings file) will be initialised with ``defaults``.
:param filepath: where to save the settings
:type filepath: :class:`unicode`
:param defaults: dict of default settings
:type defaults: :class:`dict`
An appropriate instance is provided by :class:`Workflow` instances at
:attr:`Workflow.settings`.
"""
def __init__(self, filepath, defaults=None):
"""Create new :class:`Settings` object."""
super(Settings, self).__init__()
self._filepath = filepath
self._nosave = False
self._original = {}
if os.path.exists(self._filepath):
self._load()
elif defaults:
for key, val in defaults.items():
self[key] = val
self.save() # save default settings
def _load(self):
"""Load cached settings from JSON file `self._filepath`."""
data = {}
with LockFile(self._filepath, 0.5):
with open(self._filepath, 'rb') as fp:
data.update(json.load(fp))
self._original = deepcopy(data)
self._nosave = True
self.update(data)
self._nosave = False
@uninterruptible
def save(self):
"""Save settings to JSON file specified in ``self._filepath``.
If you're using this class via :attr:`Workflow.settings`, which
you probably are, ``self._filepath`` will be ``settings.json``
in your workflow's data directory (see :attr:`~Workflow.datadir`).
"""
if self._nosave:
return
data = {}
data.update(self)
with LockFile(self._filepath, 0.5):
with atomic_writer(self._filepath, 'wb') as fp:
json.dump(data, fp, sort_keys=True, indent=2,
encoding='utf-8')
# dict methods
def __setitem__(self, key, value):
"""Implement :class:`dict` interface."""
if self._original.get(key) != value:
super(Settings, self).__setitem__(key, value)
self.save()
def __delitem__(self, key):
"""Implement :class:`dict` interface."""
super(Settings, self).__delitem__(key)
self.save()
def update(self, *args, **kwargs):
"""Override :class:`dict` method to save on update."""
super(Settings, self).update(*args, **kwargs)
self.save()
def setdefault(self, key, value=None):
"""Override :class:`dict` method to save on update."""
ret = super(Settings, self).setdefault(key, value)
self.save()
return ret
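# --- Illustrative sketch (not part of the library) ------------------------
# Settings behaves like a dict that writes itself back to disk on every
# change. The filepath and keys are invented; in a workflow you would use
# the ready-made instance at Workflow.settings instead.
def _example_settings():
    s = Settings('/tmp/example-settings.json', defaults={'count': 0})
    s['count'] = s['count'] + 1   # __setitem__ triggers save()
    s.setdefault('name', 'demo')  # also saves
    return dict(s)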
class Workflow(object):
"""The ``Workflow`` object is the main interface to Alfred-Workflow.
It provides APIs for accessing the Alfred/workflow environment,
storing & caching data, using Keychain, and generating Script
Filter feedback.
``Workflow`` is compatible with Alfred 2+. Subclass
:class:`~workflow.Workflow3` provides additional features
only available in Alfred 3+, such as workflow variables.
:param default_settings: default workflow settings. If no settings file
exists, :class:`Workflow.settings` will be pre-populated with
``default_settings``.
:type default_settings: :class:`dict`
:param update_settings: settings for updating your workflow from
GitHub releases. The only required key is ``github_slug``,
whose value must take the form of ``username/repo``.
If specified, ``Workflow`` will check the repo's releases
for updates. Your workflow must also have a semantic version
number. Please see the :ref:`User Manual <user-manual>` and
:ref:`update API docs <api-updates>` for more information.
:type update_settings: :class:`dict`
:param input_encoding: encoding of command line arguments. You
should probably leave this as the default (``utf-8``), which
is the encoding Alfred uses.
:type input_encoding: :class:`unicode`
:param normalization: normalisation to apply to CLI args.
See :meth:`Workflow.decode` for more details.
:type normalization: :class:`unicode`
:param capture_args: Capture and act on ``workflow:*`` arguments. See
:ref:`Magic arguments <magic-arguments>` for details.
:type capture_args: :class:`Boolean`
:param libraries: sequence of paths to directories containing
libraries. These paths will be prepended to ``sys.path``.
:type libraries: :class:`tuple` or :class:`list`
:param help_url: URL to webpage where a user can ask for help with
the workflow, report bugs, etc. This could be the GitHub repo
or a page on AlfredForum.com. If your workflow throws an error,
this URL will be displayed in the log and Alfred's debugger. It can
also be opened directly in a web browser with the ``workflow:help``
:ref:`magic argument <magic-arguments>`.
:type help_url: :class:`unicode` or :class:`str`
"""
# Which class to use to generate feedback items. You probably
# won't want to change this
item_class = Item
def __init__(self, default_settings=None, update_settings=None,
input_encoding='utf-8', normalization='NFC',
capture_args=True, libraries=None,
help_url=None):
"""Create new :class:`Workflow` object."""
self._default_settings = default_settings or {}
self._update_settings = update_settings or {}
self._input_encoding = input_encoding
self._normalizsation = normalization
self._capture_args = capture_args
self.help_url = help_url
self._workflowdir = None
self._settings_path = None
self._settings = None
self._bundleid = None
self._debugging = None
self._name = None
self._cache_serializer = 'cpickle'
self._data_serializer = 'cpickle'
self._info = None
self._info_loaded = False
self._logger = None
self._items = []
self._alfred_env = None
# Version number of the workflow
self._version = UNSET
# Version from last workflow run
self._last_version_run = UNSET
# Cache for regex patterns created for filter keys
self._search_pattern_cache = {}
#: Prefix for all magic arguments.
#: The default value is ``workflow:`` so keyword
#: ``config`` would match user query ``workflow:config``.
self.magic_prefix = 'workflow:'
#: Mapping of available magic arguments. The built-in magic
#: arguments are registered by default. To add your own magic arguments
#: (or override built-ins), add a key:value pair where the key is
#: what the user should enter (prefixed with :attr:`magic_prefix`)
#: and the value is a callable that will be called when the argument
#: is entered. If you would like to display a message in Alfred, the
#: function should return a ``unicode`` string.
#:
#: By default, the magic arguments documented
#: :ref:`here <magic-arguments>` are registered.
self.magic_arguments = {}
self._register_default_magic()
if libraries:
sys.path = libraries + sys.path
####################################################################
# API methods
####################################################################
# info.plist contents and alfred_* environment variables ----------
@property
def alfred_version(self):
"""Alfred version as :class:`~workflow.update.Version` object."""
from update import Version
return Version(self.alfred_env.get('version'))
@property
def alfred_env(self):
"""Dict of Alfred's environmental variables minus ``alfred_`` prefix.
.. versionadded:: 1.7
The variables Alfred 2.4+ exports are:
============================ =========================================
Variable Description
============================ =========================================
debug Set to ``1`` if Alfred's debugger is
open, otherwise unset.
preferences Path to Alfred.alfredpreferences
(where your workflows and settings are
stored).
preferences_localhash Machine-specific preferences are stored
in ``Alfred.alfredpreferences/preferences/local/<hash>``
(see ``preferences`` above for
the path to ``Alfred.alfredpreferences``)
theme ID of selected theme
theme_background Background colour of selected theme in
format ``rgba(r,g,b,a)``
theme_subtext Show result subtext.
``0`` = Always,
``1`` = Alternative actions only,
``2`` = Selected result only,
``3`` = Never
version Alfred version number, e.g. ``'2.4'``
version_build Alfred build number, e.g. ``277``
workflow_bundleid Bundle ID, e.g.
``net.deanishe.alfred-mailto``
workflow_cache Path to workflow's cache directory
workflow_data Path to workflow's data directory
workflow_name Name of current workflow
workflow_uid UID of workflow
workflow_version The version number specified in the
workflow configuration sheet/info.plist
============================ =========================================
**Note:** all values are Unicode strings except ``debug``,
``version_build`` and ``theme_subtext``, which are integers.
:returns: ``dict`` of Alfred's environmental variables without the
``alfred_`` prefix, e.g. ``preferences``, ``workflow_data``.
"""
if self._alfred_env is not None:
return self._alfred_env
data = {}
for key in (
'debug',
'preferences',
'preferences_localhash',
'theme',
'theme_background',
'theme_subtext',
'version',
'version_build',
'workflow_bundleid',
'workflow_cache',
'workflow_data',
'workflow_name',
'workflow_uid',
'workflow_version'):
value = os.getenv('alfred_' + key, '')
if value:
if key in ('debug', 'version_build', 'theme_subtext'):
value = int(value)
else:
value = self.decode(value)
data[key] = value
self._alfred_env = data
return self._alfred_env
@property
def info(self):
""":class:`dict` of ``info.plist`` contents."""
if not self._info_loaded:
self._load_info_plist()
return self._info
@property
def bundleid(self):
"""Workflow bundle ID from environmental vars or ``info.plist``.
:returns: bundle ID
:rtype: ``unicode``
"""
if not self._bundleid:
if self.alfred_env.get('workflow_bundleid'):
self._bundleid = self.alfred_env.get('workflow_bundleid')
else:
self._bundleid = unicode(self.info['bundleid'], 'utf-8')
return self._bundleid
@property
def debugging(self):
"""Whether Alfred's debugger is open.
:returns: ``True`` if Alfred's debugger is open.
:rtype: ``bool``
"""
return self.alfred_env.get('debug') == 1
@property
def name(self):
"""Workflow name from Alfred's environmental vars or ``info.plist``.
:returns: workflow name
:rtype: ``unicode``
"""
if not self._name:
if self.alfred_env.get('workflow_name'):
self._name = self.decode(self.alfred_env.get('workflow_name'))
else:
self._name = self.decode(self.info['name'])
return self._name
@property
def version(self):
"""Return the version of the workflow.
.. versionadded:: 1.9.10
Get the workflow version from environment variable,
the ``update_settings`` dict passed on
instantiation, the ``version`` file located in the workflow's
root directory or ``info.plist``. Return ``None`` if none
exists or :class:`ValueError` if the version number is invalid
(i.e. not semantic).
:returns: Version of the workflow (not Alfred-Workflow)
:rtype: :class:`~workflow.update.Version` object
"""
if self._version is UNSET:
version = None
# environment variable has priority
if self.alfred_env.get('workflow_version'):
version = self.alfred_env['workflow_version']
# Try `update_settings`
elif self._update_settings:
version = self._update_settings.get('version')
# `version` file
if not version:
filepath = self.workflowfile('version')
if os.path.exists(filepath):
with open(filepath, 'rb') as fileobj:
version = fileobj.read()
# info.plist
if not version:
version = self.info.get('version')
if version:
from update import Version
version = Version(version)
self._version = version
return self._version
# Workflow utility methods -----------------------------------------
@property
def args(self):
"""Return command line args as normalised unicode.
Args are decoded and normalised via :meth:`~Workflow.decode`.
The encoding and normalisation are the ``input_encoding`` and
``normalization`` arguments passed to :class:`Workflow` (``UTF-8``
and ``NFC`` are the defaults).
If :class:`Workflow` is called with ``capture_args=True``
(the default), :class:`Workflow` will look for certain
``workflow:*`` args and, if found, perform the corresponding
actions and exit the workflow.
See :ref:`Magic arguments <magic-arguments>` for details.
"""
msg = None
args = [self.decode(arg) for arg in sys.argv[1:]]
# Handle magic args
if len(args) and self._capture_args:
for name in self.magic_arguments:
key = '{0}{1}'.format(self.magic_prefix, name)
if key in args:
msg = self.magic_arguments[name]()
if msg:
self.logger.debug(msg)
if not sys.stdout.isatty(): # Show message in Alfred
self.add_item(msg, valid=False, icon=ICON_INFO)
self.send_feedback()
sys.exit(0)
return args
@property
def cachedir(self):
"""Path to workflow's cache directory.
The cache directory is a subdirectory of Alfred's own cache directory
in ``~/Library/Caches``. The full path in Alfred 4+ is:
``~/Library/Caches/com.runningwithcrayons.Alfred/Workflow Data/<bundle id>``
For earlier versions:
``~/Library/Caches/com.runningwithcrayons.Alfred-X/Workflow Data/<bundle id>``
where ``Alfred-X`` may be ``Alfred-2`` or ``Alfred-3``.
Returns:
unicode: full path to workflow's cache directory
"""
if self.alfred_env.get('workflow_cache'):
dirpath = self.alfred_env.get('workflow_cache')
else:
dirpath = self._default_cachedir
return self._create(dirpath)
@property
def _default_cachedir(self):
"""Alfred 2's default cache directory."""
return os.path.join(
os.path.expanduser(
'~/Library/Caches/com.runningwithcrayons.Alfred-2/'
'Workflow Data/'),
self.bundleid)
@property
def datadir(self):
"""Path to workflow's data directory.
The data directory is a subdirectory of Alfred's own data directory in
``~/Library/Application Support``. The full path for Alfred 4+ is:
``~/Library/Application Support/Alfred/Workflow Data/<bundle id>``
For earlier versions, the path is:
``~/Library/Application Support/Alfred X/Workflow Data/<bundle id>``
where ``Alfred X`` is ``Alfred 2`` or ``Alfred 3``.
Returns:
unicode: full path to workflow data directory
"""
if self.alfred_env.get('workflow_data'):
dirpath = self.alfred_env.get('workflow_data')
else:
dirpath = self._default_datadir
return self._create(dirpath)
@property
def _default_datadir(self):
"""Alfred 2's default data directory."""
return os.path.join(os.path.expanduser(
'~/Library/Application Support/Alfred 2/Workflow Data/'),
self.bundleid)
@property
def workflowdir(self):
"""Path to workflow's root directory (where ``info.plist`` is).
Returns:
unicode: full path to workflow root directory
"""
if not self._workflowdir:
# Try the working directory first, then the directory
# the library is in. CWD will be the workflow root if
# a workflow is being run in Alfred
candidates = [
os.path.abspath(os.getcwdu()),
os.path.dirname(os.path.abspath(os.path.dirname(__file__)))]
# climb the directory tree until we find `info.plist`
for dirpath in candidates:
# Ensure directory path is Unicode
dirpath = self.decode(dirpath)
while True:
if os.path.exists(os.path.join(dirpath, 'info.plist')):
self._workflowdir = dirpath
break
elif dirpath == '/':
# no `info.plist` found
break
# Check the parent directory
dirpath = os.path.dirname(dirpath)
# No need to check other candidates
if self._workflowdir:
break
if not self._workflowdir:
raise IOError("'info.plist' not found in directory tree")
return self._workflowdir
def cachefile(self, filename):
"""Path to ``filename`` in workflow's cache directory.
Return absolute path to ``filename`` within your workflow's
:attr:`cache directory <Workflow.cachedir>`.
:param filename: basename of file
:type filename: ``unicode``
:returns: full path to file within cache directory
:rtype: ``unicode``
"""
return os.path.join(self.cachedir, filename)
def datafile(self, filename):
"""Path to ``filename`` in workflow's data directory.
Return absolute path to ``filename`` within your workflow's
:attr:`data directory <Workflow.datadir>`.
:param filename: basename of file
:type filename: ``unicode``
:returns: full path to file within data directory
:rtype: ``unicode``
"""
return os.path.join(self.datadir, filename)
def workflowfile(self, filename):
"""Return full path to ``filename`` in workflow's root directory.
:param filename: basename of file
:type filename: ``unicode``
:returns: full path to file within workflow's root directory
:rtype: ``unicode``
"""
return os.path.join(self.workflowdir, filename)
@property
def logfile(self):
"""Path to logfile.
:returns: path to logfile within workflow's cache directory
:rtype: ``unicode``
"""
return self.cachefile('%s.log' % self.bundleid)
@property
def logger(self):
"""Logger that logs to both console and a log file.
If Alfred's debugger is open, log level will be ``DEBUG``,
else it will be ``INFO``.
Use :meth:`open_log` to open the log file in Console.
:returns: an initialised :class:`~logging.Logger`
"""
if self._logger:
return self._logger
# Initialise new logger and optionally handlers
logger = logging.getLogger('')
# Only add one set of handlers
# Exclude from coverage, as pytest will have configured the
# root logger already
if not len(logger.handlers): # pragma: no cover
fmt = logging.Formatter(
'%(asctime)s %(filename)s:%(lineno)s'
' %(levelname)-8s %(message)s',
datefmt='%H:%M:%S')
logfile = logging.handlers.RotatingFileHandler(
self.logfile,
maxBytes=1024 * 1024,
backupCount=1)
logfile.setFormatter(fmt)
logger.addHandler(logfile)
console = logging.StreamHandler()
console.setFormatter(fmt)
logger.addHandler(console)
if self.debugging:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
self._logger = logger
return self._logger
@logger.setter
def logger(self, logger):
"""Set a custom logger.
:param logger: The logger to use
:type logger: `~logging.Logger` instance
"""
self._logger = logger
@property
def settings_path(self):
"""Path to settings file within workflow's data directory.
:returns: path to ``settings.json`` file
:rtype: ``unicode``
"""
if not self._settings_path:
self._settings_path = self.datafile('settings.json')
return self._settings_path
@property
def settings(self):
"""Return a dictionary subclass that saves itself when changed.
See :ref:`guide-settings` in the :ref:`user-manual` for more
information on how to use :attr:`settings` and **important
limitations** on what it can do.
:returns: :class:`~workflow.workflow.Settings` instance
initialised from the data in JSON file at
:attr:`settings_path` or if that doesn't exist, with the
``default_settings`` :class:`dict` passed to
:class:`Workflow` on instantiation.
:rtype: :class:`~workflow.workflow.Settings` instance
"""
if not self._settings:
self.logger.debug('reading settings from %s', self.settings_path)
self._settings = Settings(self.settings_path,
self._default_settings)
return self._settings
@property
def cache_serializer(self):
"""Name of default cache serializer.
.. versionadded:: 1.8
This serializer is used by :meth:`cache_data()` and
:meth:`cached_data()`
See :class:`SerializerManager` for details.
:returns: serializer name
:rtype: ``unicode``
"""
return self._cache_serializer
@cache_serializer.setter
def cache_serializer(self, serializer_name):
"""Set the default cache serialization format.
.. versionadded:: 1.8
This serializer is used by :meth:`cache_data()` and
:meth:`cached_data()`
The specified serializer must already be registered with the
:class:`SerializerManager` at `~workflow.workflow.manager`,
otherwise a :class:`ValueError` will be raised.
:param serializer_name: Name of default serializer to use.
:type serializer_name: ``unicode`` or ``str``
"""
if manager.serializer(serializer_name) is None:
raise ValueError(
'Unknown serializer : `{0}`. Register your serializer '
'with `manager` first.'.format(serializer_name))
self.logger.debug('default cache serializer: %s', serializer_name)
self._cache_serializer = serializer_name
@property
def data_serializer(self):
"""Name of default data serializer.
.. versionadded:: 1.8
This serializer is used by :meth:`store_data()` and
:meth:`stored_data()`
See :class:`SerializerManager` for details.
:returns: serializer name
:rtype: ``unicode``
"""
return self._data_serializer
@data_serializer.setter
def data_serializer(self, serializer_name):
"""Set the default cache serialization format.
.. versionadded:: 1.8
This serializer is used by :meth:`store_data()` and
:meth:`stored_data()`
The specified serializer must already be registered with the
:class:`SerializerManager` at `~workflow.workflow.manager`,
otherwise a :class:`ValueError` will be raised.
:param serializer_name: Name of serializer to use by default.
"""
if manager.serializer(serializer_name) is None:
raise ValueError(
'Unknown serializer : `{0}`. Register your serializer '
'with `manager` first.'.format(serializer_name))
self.logger.debug('default data serializer: %s', serializer_name)
self._data_serializer = serializer_name
def stored_data(self, name):
"""Retrieve data from data directory.
Returns ``None`` if there are no data stored under ``name``.
.. versionadded:: 1.8
:param name: name of datastore
"""
metadata_path = self.datafile('.{0}.alfred-workflow'.format(name))
if not os.path.exists(metadata_path):
self.logger.debug('no data stored for `%s`', name)
return None
with open(metadata_path, 'rb') as file_obj:
serializer_name = file_obj.read().strip()
serializer = manager.serializer(serializer_name)
if serializer is None:
raise ValueError(
'Unknown serializer `{0}`. Register a corresponding '
'serializer with `manager.register()` '
'to load this data.'.format(serializer_name))
self.logger.debug('data `%s` stored as `%s`', name, serializer_name)
filename = '{0}.{1}'.format(name, serializer_name)
data_path = self.datafile(filename)
if not os.path.exists(data_path):
self.logger.debug('no data stored: %s', name)
if os.path.exists(metadata_path):
os.unlink(metadata_path)
return None
with open(data_path, 'rb') as file_obj:
data = serializer.load(file_obj)
self.logger.debug('stored data loaded: %s', data_path)
return data
def store_data(self, name, data, serializer=None):
"""Save data to data directory.
.. versionadded:: 1.8
If ``data`` is ``None``, the datastore will be deleted.
Note that the datastore does NOT support multiple threads.
:param name: name of datastore
:param data: object(s) to store. **Note:** some serializers
can only handle certain types of data.
:param serializer: name of serializer to use. If no serializer
is specified, the default will be used. See
:class:`SerializerManager` for more information.
:returns: data in datastore or ``None``
"""
# Ensure deletion is not interrupted by SIGTERM
@uninterruptible
def delete_paths(paths):
"""Clear one or more data stores"""
for path in paths:
if os.path.exists(path):
os.unlink(path)
self.logger.debug('deleted data file: %s', path)
serializer_name = serializer or self.data_serializer
# In order for `stored_data()` to be able to load data stored with
# an arbitrary serializer, yet still have meaningful file extensions,
# the format (i.e. extension) is saved to an accompanying file
metadata_path = self.datafile('.{0}.alfred-workflow'.format(name))
filename = '{0}.{1}'.format(name, serializer_name)
data_path = self.datafile(filename)
if data_path == self.settings_path:
raise ValueError(
'Cannot save data to' +
'`{0}` with format `{1}`. '.format(name, serializer_name) +
"This would overwrite Alfred-Workflow's settings file.")
serializer = manager.serializer(serializer_name)
if serializer is None:
raise ValueError(
'Invalid serializer `{0}`. Register your serializer with '
'`manager.register()` first.'.format(serializer_name))
if data is None: # Delete cached data
delete_paths((metadata_path, data_path))
return
# Ensure write is not interrupted by SIGTERM
@uninterruptible
def _store():
# Save file extension
with atomic_writer(metadata_path, 'wb') as file_obj:
file_obj.write(serializer_name)
with atomic_writer(data_path, 'wb') as file_obj:
serializer.dump(data, file_obj)
_store()
self.logger.debug('saved data: %s', data_path)
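# Round-trip sketch for the two methods above (names and values are
# invented). `store_data()` records the serializer name in a hidden
# metadata file so `stored_data()` can pick the matching deserializer.
#
#     wf.store_data('profile', {'name': 'Jean'})            # default format
#     wf.store_data('prefs', {'theme': 'dark'}, serializer='json')
#     wf.stored_data('profile')                             # -> {'name': 'Jean'}
#     wf.store_data('profile', None)                        # delete datastore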
def cached_data(self, name, data_func=None, max_age=60):
"""Return cached data if younger than ``max_age`` seconds.
Retrieve data from cache or re-generate and re-cache data if
stale/non-existent. If ``max_age`` is 0, return cached data no
matter how old.
:param name: name of datastore
:param data_func: function to (re-)generate data.
:type data_func: ``callable``
:param max_age: maximum age of cached data in seconds
:type max_age: ``int``
:returns: cached data, return value of ``data_func`` or ``None``
if ``data_func`` is not set
"""
serializer = manager.serializer(self.cache_serializer)
cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
age = self.cached_data_age(name)
if (age < max_age or max_age == 0) and os.path.exists(cache_path):
with open(cache_path, 'rb') as file_obj:
self.logger.debug('loading cached data: %s', cache_path)
return serializer.load(file_obj)
if not data_func:
return None
data = data_func()
self.cache_data(name, data)
return data
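# Intended pattern (a sketch; `fetch_posts` is hypothetical): pass a
# callable so stale data are regenerated and re-cached transparently.
#
#     def fetch_posts():
#         import web  # this package's HTTP helper
#         return web.get('https://example.com/posts.json').json()
#
#     posts = wf.cached_data('posts', fetch_posts, max_age=600)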
def cache_data(self, name, data):
"""Save ``data`` to cache under ``name``.
If ``data`` is ``None``, the corresponding cache file will be
deleted.
:param name: name of datastore
:param data: data to store. This may be any object supported by
the cache serializer
"""
serializer = manager.serializer(self.cache_serializer)
cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
if data is None:
if os.path.exists(cache_path):
os.unlink(cache_path)
self.logger.debug('deleted cache file: %s', cache_path)
return
with atomic_writer(cache_path, 'wb') as file_obj:
serializer.dump(data, file_obj)
self.logger.debug('cached data: %s', cache_path)
def cached_data_fresh(self, name, max_age):
"""Whether cache `name` is less than `max_age` seconds old.
:param name: name of datastore
:param max_age: maximum age of data in seconds
:type max_age: ``int``
:returns: ``True`` if data is less than ``max_age`` old, else
``False``
"""
age = self.cached_data_age(name)
if not age:
return False
return age < max_age
def cached_data_age(self, name):
"""Return age in seconds of cache `name` or 0 if cache doesn't exist.
:param name: name of datastore
:type name: ``unicode``
:returns: age of datastore in seconds
:rtype: ``int``
"""
cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
if not os.path.exists(cache_path):
return 0
return time.time() - os.stat(cache_path).st_mtime
def filter(self, query, items, key=lambda x: x, ascending=False,
include_score=False, min_score=0, max_results=0,
match_on=MATCH_ALL, fold_diacritics=True):
"""Fuzzy search filter. Returns list of ``items`` that match ``query``.
``query`` is case-insensitive. Any item that does not contain the
entirety of ``query`` is rejected.
If ``query`` is an empty string or contains only whitespace,
all items will match.
:param query: query to test items against
:type query: ``unicode``
:param items: iterable of items to test
:type items: ``list`` or ``tuple``
:param key: function to get comparison key from ``items``.
Must return a ``unicode`` string. The default simply returns
the item.
:type key: ``callable``
:param ascending: set to ``True`` to get worst matches first
:type ascending: ``Boolean``
:param include_score: Useful for debugging the scoring algorithm.
If ``True``, results will be a list of tuples
``(item, score, rule)``.
:type include_score: ``Boolean``
:param min_score: If non-zero, ignore results with a score lower
than this.
:type min_score: ``int``
:param max_results: If non-zero, prune results list to this length.
:type max_results: ``int``
:param match_on: Filter option flags. Bitwise-combined list of
``MATCH_*`` constants (see below).
:type match_on: ``int``
:param fold_diacritics: Convert search keys to ASCII-only
characters if ``query`` only contains ASCII characters.
:type fold_diacritics: ``Boolean``
:returns: list of ``items`` matching ``query`` or list of
``(item, score, rule)`` `tuples` if ``include_score`` is ``True``.
``rule`` is the ``MATCH_*`` rule that matched the item.
:rtype: ``list``
**Matching rules**
By default, :meth:`filter` uses all of the following flags (i.e.
:const:`MATCH_ALL`). The tests are always run in the given order:
1. :const:`MATCH_STARTSWITH`
Item search key starts with ``query`` (case-insensitive).
2. :const:`MATCH_CAPITALS`
The list of capital letters in item search key starts with
``query`` (``query`` may be lower-case). E.g., ``of``
would match ``OmniFocus``, ``gc`` would match ``Google Chrome``.
3. :const:`MATCH_ATOM`
Search key is split into "atoms" on non-word characters
(e.g. ``.``, ``-``, ``'``). Matches if ``query`` is one of
these atoms (case-insensitive).
4. :const:`MATCH_INITIALS_STARTSWITH`
Initials are the first characters of the above-described
"atoms" (case-insensitive).
5. :const:`MATCH_INITIALS_CONTAIN`
``query`` is a substring of the above-described initials.
6. :const:`MATCH_INITIALS`
Combination of (4) and (5).
7. :const:`MATCH_SUBSTRING`
``query`` is a substring of item search key (case-insensitive).
8. :const:`MATCH_ALLCHARS`
All characters in ``query`` appear in item search key in
the same order (case-insensitive).
9. :const:`MATCH_ALL`
Combination of all the above.
:const:`MATCH_ALLCHARS` is considerably slower than the other
tests and provides much less accurate results.
**Examples:**
To ignore :const:`MATCH_ALLCHARS` (tends to provide the worst
matches and is expensive to run), use
``match_on=MATCH_ALL ^ MATCH_ALLCHARS``.
To match only on capitals, use ``match_on=MATCH_CAPITALS``.
To match only on startswith and substring, use
``match_on=MATCH_STARTSWITH | MATCH_SUBSTRING``.
**Diacritic folding**
.. versionadded:: 1.3
If ``fold_diacritics`` is ``True`` (the default), and ``query``
contains only ASCII characters, non-ASCII characters in search keys
will be converted to ASCII equivalents (e.g. **ü** -> **u**,
**ß** -> **ss**, **é** -> **e**).
See :const:`ASCII_REPLACEMENTS` for all replacements.
If ``query`` contains non-ASCII characters, search keys will not be
altered.
"""
if not query:
return items
# Remove preceding/trailing spaces
query = query.strip()
if not query:
return items
# Use user override if there is one
fold_diacritics = self.settings.get('__workflow_diacritic_folding',
fold_diacritics)
results = []
for item in items:
skip = False
score = 0
words = [s.strip() for s in query.split(' ')]
value = key(item).strip()
if value == '':
continue
for word in words:
if word == '':
continue
s, rule = self._filter_item(value, word, match_on,
fold_diacritics)
if not s: # Skip items that don't match part of the query
skip = True
score += s
if skip:
continue
if score:
# use "reversed" `score` (i.e. highest becomes lowest) and
# `value` as sort key. This means items with the same score
# will be sorted in alphabetical not reverse alphabetical order
results.append(((100.0 / score, value.lower(), score),
(item, score, rule)))
# sort on keys, then discard the keys
results.sort(reverse=ascending)
results = [t[1] for t in results]
if min_score:
results = [r for r in results if r[1] > min_score]
if max_results and len(results) > max_results:
results = results[:max_results]
# return list of ``(item, score, rule)``
if include_score:
return results
# just return list of items
return [t[0] for t in results]
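# Illustrative calls (``wf`` and ``books`` are hypothetical; ``books``
# is assumed to be a list of dicts with a 'title' key):
#
#     hits = wf.filter(query, books, key=lambda b: b['title'])
#     # Skip the slow, inaccurate MATCH_ALLCHARS rule:
#     hits = wf.filter(query, books, key=lambda b: b['title'],
#                      match_on=MATCH_ALL ^ MATCH_ALLCHARS)
#     # Inspect the scoring while debugging:
#     for item, score, rule in wf.filter(query, books, include_score=True,
#                                        key=lambda b: b['title']):
#         print(item, score, rule)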
def _filter_item(self, value, query, match_on, fold_diacritics):
"""Filter ``value`` against ``query`` using rules ``match_on``.
:returns: ``(score, rule)``
"""
query = query.lower()
if not isascii(query):
fold_diacritics = False
if fold_diacritics:
value = self.fold_to_ascii(value)
# pre-filter any items that do not contain all characters
# of ``query`` to save on running several more expensive tests
if not set(query) <= set(value.lower()):
return (0, None)
# item starts with query
if match_on & MATCH_STARTSWITH and value.lower().startswith(query):
score = 100.0 - (len(value) / len(query))
return (score, MATCH_STARTSWITH)
# query matches capitalised letters in item,
# e.g. of = OmniFocus
if match_on & MATCH_CAPITALS:
initials = ''.join([c for c in value if c in INITIALS])
if initials.lower().startswith(query):
score = 100.0 - (len(initials) / len(query))
return (score, MATCH_CAPITALS)
# split the item into "atoms", i.e. words separated by
# spaces or other non-word characters
if (match_on & MATCH_ATOM or
match_on & MATCH_INITIALS_CONTAIN or
match_on & MATCH_INITIALS_STARTSWITH):
atoms = [s.lower() for s in split_on_delimiters(value)]
# print('atoms : %s --> %s' % (value, atoms))
# initials of the atoms
initials = ''.join([s[0] for s in atoms if s])
if match_on & MATCH_ATOM:
# is `query` one of the atoms in item?
# similar to substring, but scores more highly, as it's
# a word within the item
if query in atoms:
score = 100.0 - (len(value) / len(query))
return (score, MATCH_ATOM)
# `query` matches start (or all) of the initials of the
# atoms, e.g. ``himym`` matches "How I Met Your Mother"
# *and* "how i met your mother" (the ``capitals`` rule only
# matches the former)
if (match_on & MATCH_INITIALS_STARTSWITH and
initials.startswith(query)):
score = 100.0 - (len(initials) / len(query))
return (score, MATCH_INITIALS_STARTSWITH)
# `query` is a substring of initials, e.g. ``doh`` matches
# "The Dukes of Hazzard"
elif (match_on & MATCH_INITIALS_CONTAIN and
query in initials):
score = 95.0 - (len(initials) / len(query))
return (score, MATCH_INITIALS_CONTAIN)
# `query` is a substring of item
if match_on & MATCH_SUBSTRING and query in value.lower():
score = 90.0 - (len(value) / len(query))
return (score, MATCH_SUBSTRING)
# finally, assign a score based on how close together the
# characters in `query` are in item.
if match_on & MATCH_ALLCHARS:
search = self._search_for_query(query)
match = search(value)
if match:
score = 100.0 / ((1 + match.start()) *
(match.end() - match.start() + 1))
return (score, MATCH_ALLCHARS)
# Nothing matched
return (0, None)
def _search_for_query(self, query):
if query in self._search_pattern_cache:
return self._search_pattern_cache[query]
# Build pattern: include all characters
pattern = []
for c in query:
# pattern.append('[^{0}]*{0}'.format(re.escape(c)))
pattern.append('.*?{0}'.format(re.escape(c)))
pattern = ''.join(pattern)
search = re.compile(pattern, re.IGNORECASE).search
self._search_pattern_cache[query] = search
return search
def run(self, func, text_errors=False):
"""Call ``func`` to run your workflow.
:param func: Callable to call with ``self`` (i.e. the :class:`Workflow`
instance) as first argument.
:param text_errors: Emit error messages in plain text, not in
Alfred's XML/JSON feedback format. Use this when you're not
running Alfred-Workflow in a Script Filter and would like
to pass the error message to, say, a notification.
:type text_errors: ``Boolean``
``func`` will be called with :class:`Workflow` instance as first
argument.
``func`` should be the main entry point to your workflow.
Any exceptions raised will be logged and an error message will be
output to Alfred.
"""
start = time.time()
# Write to debugger to ensure "real" output starts on a new line
print('.', file=sys.stderr)
# Call workflow's entry function/method within a try-except block
# to catch any errors and display an error message in Alfred
try:
if self.version:
self.logger.debug('---------- %s (%s) ----------',
self.name, self.version)
else:
self.logger.debug('---------- %s ----------', self.name)
# Run update check if configured for self-updates.
# This call has to go in the `run` try-except block, as it will
# initialise `self.settings`, which will raise an exception
# if `settings.json` isn't valid.
if self._update_settings:
self.check_update()
# Run workflow's entry function/method
func(self)
# Set last version run to current version after a successful
# run
self.set_last_version()
except Exception as err:
self.logger.exception(err)
if self.help_url:
self.logger.info('for assistance, see: %s', self.help_url)
if not sys.stdout.isatty(): # Show error in Alfred
if text_errors:
print(unicode(err).encode('utf-8'), end='')
else:
self._items = []
if self._name:
name = self._name
elif self._bundleid: # pragma: no cover
name = self._bundleid
else: # pragma: no cover
name = os.path.dirname(__file__)
self.add_item("Error in workflow '%s'" % name,
unicode(err),
icon=ICON_ERROR)
self.send_feedback()
return 1
finally:
self.logger.debug('---------- finished in %0.3fs ----------',
time.time() - start)
return 0
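# Typical entry-point boilerplate (a sketch; `main` is hypothetical).
# Passing `run()`'s 0/1 status to `sys.exit` reports failures to Alfred.
#
#     def main(wf):
#         wf.add_item('Hello', 'World')
#         wf.send_feedback()
#
#     if __name__ == '__main__':
#         wf = Workflow()
#         sys.exit(wf.run(main))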
# Alfred feedback methods ------------------------------------------
def add_item(self, title, subtitle='', modifier_subtitles=None, arg=None,
autocomplete=None, valid=False, uid=None, icon=None,
icontype=None, type=None, largetext=None, copytext=None,
quicklookurl=None):
"""Add an item to be output to Alfred.
:param title: Title shown in Alfred
:type title: ``unicode``
:param subtitle: Subtitle shown in Alfred
:type subtitle: ``unicode``
:param modifier_subtitles: Subtitles shown when modifier
(CMD, OPT etc.) is pressed. Use a ``dict`` with the lowercase
keys ``cmd``, ``ctrl``, ``shift``, ``alt`` and ``fn``
:type modifier_subtitles: ``dict``
:param arg: Argument passed by Alfred as ``{query}`` when item is
actioned
:type arg: ``unicode``
:param autocomplete: Text expanded in Alfred when item is TABbed
:type autocomplete: ``unicode``
:param valid: Whether or not item can be actioned
:type valid: ``Boolean``
:param uid: Used by Alfred to remember/sort items
:type uid: ``unicode``
:param icon: Filename of icon to use
:type icon: ``unicode``
:param icontype: Type of icon. Must be one of ``None`` , ``'filetype'``
or ``'fileicon'``. Use ``'filetype'`` when ``icon`` is a filetype
such as ``'public.folder'``. Use ``'fileicon'`` when you wish to
use the icon of the file specified as ``icon``, e.g.
``icon='/Applications/Safari.app', icontype='fileicon'``.
Leave as `None` if ``icon`` points to an actual
icon file.
:type icontype: ``unicode``
:param type: Result type. Currently only ``'file'`` is supported
(by Alfred). This will tell Alfred to enable file actions for
this item.
:type type: ``unicode``
:param largetext: Text to be displayed in Alfred's large text box
if user presses CMD+L on item.
:type largetext: ``unicode``
:param copytext: Text to be copied to pasteboard if user presses
CMD+C on item.
:type copytext: ``unicode``
:param quicklookurl: URL to be displayed using Alfred's Quick Look
feature (tapping ``SHIFT`` or ``⌘+Y`` on a result).
:type quicklookurl: ``unicode``
:returns: :class:`Item` instance
See :ref:`icons` for a list of the supported system icons.
.. note::
Although this method returns an :class:`Item` instance, you don't
need to hold onto it or worry about it. All generated :class:`Item`
instances are also collected internally and sent to Alfred when
:meth:`send_feedback` is called.
The generated :class:`Item` is only returned in case you want to
edit it or do something with it other than send it to Alfred.
"""
item = self.item_class(title, subtitle, modifier_subtitles, arg,
autocomplete, valid, uid, icon, icontype, type,
largetext, copytext, quicklookurl)
self._items.append(item)
return item
def send_feedback(self):
"""Print stored items to console/Alfred as XML."""
root = ET.Element('items')
for item in self._items:
root.append(item.elem)
sys.stdout.write('<?xml version="1.0" encoding="utf-8"?>\n')
sys.stdout.write(ET.tostring(root).encode('utf-8'))
sys.stdout.flush()
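# Minimal Script Filter sketch built on the two methods above (the
# query handling and item names are assumptions):
#
#     def main(wf):
#         query = wf.args[0] if wf.args else ''
#         for name in wf.filter(query, ['alpha', 'beta', 'gamma']):
#             wf.add_item(name, arg=name, valid=True, icon=ICON_INFO)
#         wf.send_feedback()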
####################################################################
# Updating methods
####################################################################
@property
def first_run(self):
"""Return ``True`` if it's the first time this version has run.
.. versionadded:: 1.9.10
Raises a :class:`ValueError` if :attr:`version` isn't set.
"""
if not self.version:
raise ValueError('No workflow version set')
if not self.last_version_run:
return True
return self.version != self.last_version_run
@property
def last_version_run(self):
"""Return version of last version to run (or ``None``).
.. versionadded:: 1.9.10
:returns: :class:`~workflow.update.Version` instance
or ``None``
"""
if self._last_version_run is UNSET:
version = self.settings.get('__workflow_last_version')
if version:
from update import Version
version = Version(version)
self._last_version_run = version
self.logger.debug('last run version: %s', self._last_version_run)
return self._last_version_run
def set_last_version(self, version=None):
"""Set :attr:`last_version_run` to current version.
.. versionadded:: 1.9.10
:param version: version to store (default is current version)
:type version: :class:`~workflow.update.Version` instance
or ``unicode``
:returns: ``True`` if version is saved, else ``False``
"""
if not version:
if not self.version:
self.logger.warning(
"Can't save last version: workflow has no version")
return False
version = self.version
if isinstance(version, basestring):
from update import Version
version = Version(version)
self.settings['__workflow_last_version'] = str(version)
self.logger.debug('set last run version: %s', version)
return True
@property
def update_available(self):
"""Whether an update is available.
.. versionadded:: 1.9
See :ref:`guide-updates` in the :ref:`user-manual` for detailed
information on how to enable your workflow to update itself.
:returns: ``True`` if an update is available, else ``False``
"""
key = '__workflow_latest_version'
# Create a new workflow object to ensure standard serialiser
# is used (update.py is called without the user's settings)
status = Workflow().cached_data(key, max_age=0)
# self.logger.debug('update status: %r', status)
if not status or not status.get('available'):
return False
return status['available']
@property
def prereleases(self):
"""Whether workflow should update to pre-release versions.
.. versionadded:: 1.16
:returns: ``True`` if pre-releases are enabled with the :ref:`magic
argument <magic-arguments>` or the ``update_settings`` dict, else
``False``.
"""
if self._update_settings.get('prereleases'):
return True
return self.settings.get('__workflow_prereleases') or False
def check_update(self, force=False):
"""Call update script if it's time to check for a new release.
.. versionadded:: 1.9
The update script will be run in the background, so it won't
interfere in the execution of your workflow.
See :ref:`guide-updates` in the :ref:`user-manual` for detailed
information on how to enable your workflow to update itself.
:param force: Force update check
:type force: ``Boolean``
"""
key = '__workflow_latest_version'
frequency = self._update_settings.get('frequency',
DEFAULT_UPDATE_FREQUENCY)
if not force and not self.settings.get('__workflow_autoupdate', True):
self.logger.debug('Auto update turned off by user')
return
# Check for new version if it's time
if (force or not self.cached_data_fresh(key, frequency * 86400)):
repo = self._update_settings['github_slug']
# version = self._update_settings['version']
version = str(self.version)
from background import run_in_background
# update.py is adjacent to this file
update_script = os.path.join(os.path.dirname(__file__),
b'update.py')
cmd = ['/usr/bin/python', update_script, 'check', repo, version]
if self.prereleases:
cmd.append('--prereleases')
self.logger.info('checking for update ...')
run_in_background('__workflow_update_check', cmd)
else:
self.logger.debug('update check not due')
def start_update(self):
"""Check for update and download and install new workflow file.
.. versionadded:: 1.9
See :ref:`guide-updates` in the :ref:`user-manual` for detailed
information on how to enable your workflow to update itself.
:returns: ``True`` if an update is available and will be
installed, else ``False``
"""
import update
repo = self._update_settings['github_slug']
# version = self._update_settings['version']
version = str(self.version)
if not update.check_update(repo, version, self.prereleases):
return False
from background import run_in_background
# update.py is adjacent to this file
update_script = os.path.join(os.path.dirname(__file__),
b'update.py')
cmd = ['/usr/bin/python', update_script, 'install', repo, version]
if self.prereleases:
cmd.append('--prereleases')
self.logger.debug('downloading update ...')
run_in_background('__workflow_update_install', cmd)
return True
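# Sketch of enabling self-updates (the GitHub slug is a placeholder and
# this assumes the workflow's version is set, e.g. via info.plist):
#
#     wf = Workflow(update_settings={
#         'github_slug': 'username/reponame',
#         'frequency': 7,                      # check at most weekly
#     })
#     if wf.update_available:
#         wf.start_update()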
####################################################################
# Keychain password storage methods
####################################################################
def save_password(self, account, password, service=None):
"""Save account credentials.
If the account exists, the old password will first be deleted
(Keychain throws an error otherwise).
If something goes wrong, a :class:`KeychainError` exception will
be raised.
:param account: name of the account the password is for, e.g.
"Pinboard"
:type account: ``unicode``
:param password: the password to secure
:type password: ``unicode``
:param service: Name of the service. By default, this is the
workflow's bundle ID
:type service: ``unicode``
"""
if not service:
service = self.bundleid
try:
self._call_security('add-generic-password', service, account,
'-w', password)
self.logger.debug('saved password : %s:%s', service, account)
except PasswordExists:
self.logger.debug('password exists : %s:%s', service, account)
current_password = self.get_password(account, service)
if current_password == password:
self.logger.debug('password unchanged')
else:
self.delete_password(account, service)
self._call_security('add-generic-password', service,
account, '-w', password)
self.logger.debug('save_password : %s:%s', service, account)
def get_password(self, account, service=None):
"""Retrieve the password saved at ``service/account``.
Raise :class:`PasswordNotFound` exception if password doesn't exist.
:param account: name of the account the password is for, e.g.
"Pinboard"
:type account: ``unicode``
:param service: Name of the service. By default, this is the workflow's
bundle ID
:type service: ``unicode``
:returns: account password
:rtype: ``unicode``
"""
if not service:
service = self.bundleid
output = self._call_security('find-generic-password', service,
account, '-g')
# Parsing of `security` output is adapted from python-keyring
# by Jason R. Coombs
# https://pypi.python.org/pypi/keyring
m = re.search(
r'password:\s*(?:0x(?P<hex>[0-9A-F]+)\s*)?(?:"(?P<pw>.*)")?',
output)
if m:
groups = m.groupdict()
h = groups.get('hex')
password = groups.get('pw')
if h:
password = unicode(binascii.unhexlify(h), 'utf-8')
self.logger.debug('got password : %s:%s', service, account)
return password
def delete_password(self, account, service=None):
"""Delete the password stored at ``service/account``.
Raise :class:`PasswordNotFound` if account is unknown.
:param account: name of the account the password is for, e.g.
"Pinboard"
:type account: ``unicode``
:param service: Name of the service. By default, this is the workflow's
bundle ID
:type service: ``unicode``
"""
if not service:
service = self.bundleid
self._call_security('delete-generic-password', service, account)
self.logger.debug('deleted password : %s:%s', service, account)
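# Keychain round-trip sketch (account and password values invented):
#
#     wf.save_password('pinboard', 's3cret')   # create or update
#     wf.get_password('pinboard')              # -> u's3cret'
#     wf.delete_password('pinboard')           # PasswordNotFound if absent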
####################################################################
# Methods for workflow:* magic args
####################################################################
def _register_default_magic(self):
"""Register the built-in magic arguments."""
# TODO: refactor & simplify
# Wrap callback and message with callable
def callback(func, msg):
def wrapper():
func()
return msg
return wrapper
self.magic_arguments['delcache'] = callback(self.clear_cache,
'Deleted workflow cache')
self.magic_arguments['deldata'] = callback(self.clear_data,
'Deleted workflow data')
self.magic_arguments['delsettings'] = callback(
self.clear_settings, 'Deleted workflow settings')
self.magic_arguments['reset'] = callback(self.reset,
'Reset workflow')
self.magic_arguments['openlog'] = callback(self.open_log,
'Opening workflow log file')
self.magic_arguments['opencache'] = callback(
self.open_cachedir, 'Opening workflow cache directory')
self.magic_arguments['opendata'] = callback(
self.open_datadir, 'Opening workflow data directory')
self.magic_arguments['openworkflow'] = callback(
self.open_workflowdir, 'Opening workflow directory')
self.magic_arguments['openterm'] = callback(
self.open_terminal, 'Opening workflow root directory in Terminal')
# Diacritic folding
def fold_on():
self.settings['__workflow_diacritic_folding'] = True
return 'Diacritics will always be folded'
def fold_off():
self.settings['__workflow_diacritic_folding'] = False
return 'Diacritics will never be folded'
def fold_default():
if '__workflow_diacritic_folding' in self.settings:
del self.settings['__workflow_diacritic_folding']
return 'Diacritics folding reset'
self.magic_arguments['foldingon'] = fold_on
self.magic_arguments['foldingoff'] = fold_off
self.magic_arguments['foldingdefault'] = fold_default
# Updates
def update_on():
self.settings['__workflow_autoupdate'] = True
return 'Auto update turned on'
def update_off():
self.settings['__workflow_autoupdate'] = False
return 'Auto update turned off'
def prereleases_on():
self.settings['__workflow_prereleases'] = True
return 'Prerelease updates turned on'
def prereleases_off():
self.settings['__workflow_prereleases'] = False
return 'Prerelease updates turned off'
def do_update():
if self.start_update():
return 'Downloading and installing update ...'
else:
return 'No update available'
self.magic_arguments['autoupdate'] = update_on
self.magic_arguments['noautoupdate'] = update_off
self.magic_arguments['prereleases'] = prereleases_on
self.magic_arguments['noprereleases'] = prereleases_off
self.magic_arguments['update'] = do_update
# Help
def do_help():
if self.help_url:
self.open_help()
return 'Opening workflow help URL in browser'
else:
return 'Workflow has no help URL'
def show_version():
if self.version:
return 'Version: {0}'.format(self.version)
else:
return 'This workflow has no version number'
def list_magic():
"""Display all available magic args in Alfred."""
isatty = sys.stderr.isatty()
for name in sorted(self.magic_arguments.keys()):
if name == 'magic':
continue
arg = self.magic_prefix + name
self.logger.debug(arg)
if not isatty:
self.add_item(arg, icon=ICON_INFO)
if not isatty:
self.send_feedback()
self.magic_arguments['help'] = do_help
self.magic_arguments['magic'] = list_magic
self.magic_arguments['version'] = show_version
def clear_cache(self, filter_func=lambda f: True):
"""Delete all files in workflow's :attr:`cachedir`.
:param filter_func: Callable to determine whether a file should be
deleted or not. ``filter_func`` is called with the filename
of each file in the cache directory. If it returns ``True``,
the file will be deleted.
By default, *all* files will be deleted.
:type filter_func: ``callable``
"""
self._delete_directory_contents(self.cachedir, filter_func)
def clear_data(self, filter_func=lambda f: True):
"""Delete all files in workflow's :attr:`datadir`.
:param filter_func: Callable to determine whether a file should be
deleted or not. ``filter_func`` is called with the filename
of each file in the data directory. If it returns ``True``,
the file will be deleted.
By default, *all* files will be deleted.
:type filter_func: ``callable``
"""
self._delete_directory_contents(self.datadir, filter_func)
def clear_settings(self):
"""Delete workflow's :attr:`settings_path`."""
if os.path.exists(self.settings_path):
os.unlink(self.settings_path)
self.logger.debug('deleted : %r', self.settings_path)
def reset(self):
"""Delete workflow settings, cache and data.
File :attr:`settings <settings_path>` and directories
:attr:`cache <cachedir>` and :attr:`data <datadir>` are deleted.
"""
self.clear_cache()
self.clear_data()
self.clear_settings()
def open_log(self):
"""Open :attr:`logfile` in default app (usually Console.app)."""
subprocess.call(['open', self.logfile]) # nosec
def open_cachedir(self):
"""Open the workflow's :attr:`cachedir` in Finder."""
subprocess.call(['open', self.cachedir]) # nosec
def open_datadir(self):
"""Open the workflow's :attr:`datadir` in Finder."""
subprocess.call(['open', self.datadir]) # nosec
def open_workflowdir(self):
"""Open the workflow's :attr:`workflowdir` in Finder."""
subprocess.call(['open', self.workflowdir]) # nosec
def open_terminal(self):
"""Open a Terminal window at workflow's :attr:`workflowdir`."""
subprocess.call(['open', '-a', 'Terminal', self.workflowdir]) # nosec
def open_help(self):
"""Open :attr:`help_url` in default browser."""
subprocess.call(['open', self.help_url]) # nosec
return 'Opening workflow help URL in browser'
####################################################################
# Helper methods
####################################################################
def decode(self, text, encoding=None, normalization=None):
"""Return ``text`` as normalised unicode.
If ``encoding`` and/or ``normalization`` is ``None``, the
``input_encoding`` and ``normalization`` parameters passed to
:class:`Workflow` are used.
:param text: string
:type text: encoded or Unicode string. If ``text`` is already a
Unicode string, it will only be normalised.
:param encoding: The text encoding to use to decode ``text`` to
Unicode.
:type encoding: ``unicode`` or ``None``
:param normalization: The normalisation form to apply to ``text``.
:type normalization: ``unicode`` or ``None``
:returns: decoded and normalised ``unicode``
:class:`Workflow` uses "NFC" normalisation by default. This is the
standard for Python and will work well with data from the web (via
:mod:`~workflow.web` or :mod:`json`).
macOS, on the other hand, uses "NFD" normalisation (nearly), so data
coming from the system (e.g. via :mod:`subprocess` or
:func:`os.listdir`/:mod:`os.path`) may not match. You should either
normalise this data, too, or change the default normalisation used by
:class:`Workflow`.
"""
encoding = encoding or self._input_encoding
normalization = normalization or self._normalizsation
if not isinstance(text, unicode):
text = unicode(text, encoding)
return unicodedata.normalize(normalization, text)
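# Why normalisation matters, in one sketch (filenames invented): the
# "same" string from the web (NFC) and from macOS (~NFD) can compare
# unequal until both pass through `decode()`.
#
#     from_web = u'caf\u00e9.txt'           # é as one codepoint
#     from_fs = os.listdir(u'.')[0]         # é as 'e' + combining accent
#     wf.decode(from_web) == wf.decode(from_fs)   # comparable once normalised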
def fold_to_ascii(self, text):
"""Convert non-ASCII characters to closest ASCII equivalent.
.. versionadded:: 1.3
.. note:: This only works for a subset of European languages.
:param text: text to convert
:type text: ``unicode``
:returns: text containing only ASCII characters
:rtype: ``unicode``
"""
if isascii(text):
return text
text = ''.join([ASCII_REPLACEMENTS.get(c, c) for c in text])
return unicode(unicodedata.normalize('NFKD',
text).encode('ascii', 'ignore'))
def dumbify_punctuation(self, text):
"""Convert non-ASCII punctuation to closest ASCII equivalent.
This method replaces "smart" quotes and n- or m-dashes with their
workaday ASCII equivalents. This method is currently not used
internally, but exists as a helper method for workflow authors.
.. versionadded:: 1.9.7
:param text: text to convert
:type text: ``unicode``
:returns: text with only ASCII punctuation
:rtype: ``unicode``
"""
if isascii(text):
return text
text = ''.join([DUMB_PUNCTUATION.get(c, c) for c in text])
return text
def _delete_directory_contents(self, dirpath, filter_func):
"""Delete all files in a directory.
:param dirpath: path to directory to clear
:type dirpath: ``unicode`` or ``str``
:param filter_func: function to determine whether a file shall be
deleted or not.
:type filter_func: ``callable``
"""
if os.path.exists(dirpath):
for filename in os.listdir(dirpath):
if not filter_func(filename):
continue
path = os.path.join(dirpath, filename)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.unlink(path)
self.logger.debug('deleted : %r', path)
def _load_info_plist(self):
"""Load workflow info from ``info.plist``."""
# info.plist should be in the directory above this one
self._info = plistlib.readPlist(self.workflowfile('info.plist'))
self._info_loaded = True
def _create(self, dirpath):
"""Create directory `dirpath` if it doesn't exist.
:param dirpath: path to directory
:type dirpath: ``unicode``
:returns: ``dirpath`` argument
:rtype: ``unicode``
"""
if not os.path.exists(dirpath):
os.makedirs(dirpath)
return dirpath
def _call_security(self, action, service, account, *args):
"""Call ``security`` CLI program that provides access to keychains.
May raise `PasswordNotFound`, `PasswordExists` or `KeychainError`
exceptions (the first two are subclasses of `KeychainError`).
:param action: The ``security`` action to call, e.g.
``add-generic-password``
:type action: ``unicode``
:param service: Name of the service.
:type service: ``unicode``
:param account: name of the account the password is for, e.g.
"Pinboard"
:type account: ``unicode``
:param password: the password to secure
:type password: ``unicode``
:param *args: list of command line arguments to be passed to
``security``
:type *args: `list` or `tuple`
:returns: ``(retcode, output)``. ``retcode`` is an `int`, ``output`` a
``unicode`` string.
:rtype: `tuple` (`int`, ``unicode``)
"""
cmd = ['security', action, '-s', service, '-a', account] + list(args)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, _ = p.communicate()
if p.returncode == 44: # password does not exist
raise PasswordNotFound()
elif p.returncode == 45: # password already exists
raise PasswordExists()
elif p.returncode > 0:
err = KeychainError('Unknown Keychain error : %s' % stdout)
err.retcode = p.returncode
raise err
return stdout.strip().decode('utf-8')
|
Alfred-Workflow
|
/Alfred-Workflow-1.40.0.tar.gz/Alfred-Workflow-1.40.0/workflow/workflow.py
|
workflow.py
|
from __future__ import absolute_import, print_function
import codecs
import json
import mimetypes
import os
import random
import re
import socket
import string
import unicodedata
import urllib
import urllib2
import urlparse
import zlib
__version__ = open(os.path.join(os.path.dirname(__file__), 'version')).read()
USER_AGENT = (u'Alfred-Workflow/' + __version__ +
' (+http://www.deanishe.net/alfred-workflow)')
# Valid characters for multipart form data boundaries
BOUNDARY_CHARS = string.digits + string.ascii_letters
# HTTP response codes
RESPONSES = {
100: 'Continue',
101: 'Switching Protocols',
200: 'OK',
201: 'Created',
202: 'Accepted',
203: 'Non-Authoritative Information',
204: 'No Content',
205: 'Reset Content',
206: 'Partial Content',
300: 'Multiple Choices',
301: 'Moved Permanently',
302: 'Found',
303: 'See Other',
304: 'Not Modified',
305: 'Use Proxy',
307: 'Temporary Redirect',
400: 'Bad Request',
401: 'Unauthorized',
402: 'Payment Required',
403: 'Forbidden',
404: 'Not Found',
405: 'Method Not Allowed',
406: 'Not Acceptable',
407: 'Proxy Authentication Required',
408: 'Request Timeout',
409: 'Conflict',
410: 'Gone',
411: 'Length Required',
412: 'Precondition Failed',
413: 'Request Entity Too Large',
414: 'Request-URI Too Long',
415: 'Unsupported Media Type',
416: 'Requested Range Not Satisfiable',
417: 'Expectation Failed',
500: 'Internal Server Error',
501: 'Not Implemented',
502: 'Bad Gateway',
503: 'Service Unavailable',
504: 'Gateway Timeout',
505: 'HTTP Version Not Supported'
}
def str_dict(dic):
"""Convert keys and values in ``dic`` into UTF-8-encoded :class:`str`.
:param dic: Mapping of Unicode strings
:type dic: dict
:returns: Dictionary containing only UTF-8 strings
:rtype: dict
"""
if isinstance(dic, CaseInsensitiveDictionary):
dic2 = CaseInsensitiveDictionary()
else:
dic2 = {}
for k, v in dic.items():
if isinstance(k, unicode):
k = k.encode('utf-8')
if isinstance(v, unicode):
v = v.encode('utf-8')
dic2[k] = v
return dic2
class NoRedirectHandler(urllib2.HTTPRedirectHandler):
"""Prevent redirections."""
def redirect_request(self, *args):
"""Ignore redirect."""
return None
# Adapted from https://gist.github.com/babakness/3901174
class CaseInsensitiveDictionary(dict):
"""Dictionary with caseless key search.
Enables case-insensitive searching while preserving case sensitivity
when keys are listed, i.e. via the keys() or items() methods.
Works by storing a lowercase version of the key as the new key and
the original key-value pair as the key's value
(values become dictionaries).
"""
def __init__(self, initval=None):
"""Create new case-insensitive dictionary."""
if isinstance(initval, dict):
for key, value in initval.iteritems():
self.__setitem__(key, value)
elif isinstance(initval, list):
for (key, value) in initval:
self.__setitem__(key, value)
def __contains__(self, key):
return dict.__contains__(self, key.lower())
def __getitem__(self, key):
return dict.__getitem__(self, key.lower())['val']
def __setitem__(self, key, value):
return dict.__setitem__(self, key.lower(), {'key': key, 'val': value})
def get(self, key, default=None):
"""Return value for case-insensitive key or default."""
try:
v = dict.__getitem__(self, key.lower())
except KeyError:
return default
else:
return v['val']
def update(self, other):
"""Update values from other ``dict``."""
for k, v in other.items():
self[k] = v
def items(self):
"""Return ``(key, value)`` pairs."""
return [(v['key'], v['val']) for v in dict.itervalues(self)]
def keys(self):
"""Return original keys."""
return [v['key'] for v in dict.itervalues(self)]
def values(self):
"""Return all values."""
return [v['val'] for v in dict.itervalues(self)]
def iteritems(self):
"""Iterate over ``(key, value)`` pairs."""
for v in dict.itervalues(self):
yield v['key'], v['val']
def iterkeys(self):
"""Iterate over original keys."""
for v in dict.itervalues(self):
yield v['key']
def itervalues(self):
"""Interate over values."""
for v in dict.itervalues(self):
yield v['val']
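# Behaviour sketch for the dictionary above (values are arbitrary):
#
#     d = CaseInsensitiveDictionary({'Content-Type': 'text/html'})
#     d['content-type']   # -> 'text/html' (caseless lookup)
#     d.keys()            # -> ['Content-Type'] (original case preserved)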
class Request(urllib2.Request):
"""Subclass of :class:`urllib2.Request` that supports custom methods."""
def __init__(self, *args, **kwargs):
"""Create a new :class:`Request`."""
self._method = kwargs.pop('method', None)
urllib2.Request.__init__(self, *args, **kwargs)
def get_method(self):
return self._method.upper()
class Response(object):
"""
Returned by :func:`request` / :func:`get` / :func:`post` functions.
Simplified version of the ``Response`` object in the ``requests`` library.
>>> r = request('http://www.google.com')
>>> r.status_code
200
>>> r.encoding
ISO-8859-1
>>> r.content # bytes
<html> ...
>>> r.text # unicode, decoded according to charset in HTTP header/meta tag
u'<html> ...'
>>> r.json() # content parsed as JSON
"""
def __init__(self, request, stream=False):
"""Call `request` with :mod:`urllib2` and process results.
:param request: :class:`Request` instance
:param stream: Whether to stream response or retrieve it all at once
:type stream: bool
"""
self.request = request
self._stream = stream
self.url = None
self.raw = None
self._encoding = None
self.error = None
self.status_code = None
self.reason = None
self.headers = CaseInsensitiveDictionary()
self._content = None
self._content_loaded = False
self._gzipped = False
# Execute query
try:
self.raw = urllib2.urlopen(request)
except urllib2.HTTPError as err:
self.error = err
try:
self.url = err.geturl()
# sometimes (e.g. when authentication fails)
# urllib can't get a URL from an HTTPError
# This behaviour changes across Python versions,
# so no test cover (it isn't important).
except AttributeError: # pragma: no cover
pass
self.status_code = err.code
else:
self.status_code = self.raw.getcode()
self.url = self.raw.geturl()
self.reason = RESPONSES.get(self.status_code)
# Parse additional info if request succeeded
if not self.error:
headers = self.raw.info()
self.transfer_encoding = headers.getencoding()
self.mimetype = headers.gettype()
for key in headers.keys():
self.headers[key.lower()] = headers.get(key)
# Is content gzipped?
# Transfer-Encoding appears to not be used in the wild
# (contrary to the HTTP standard), but no harm in testing
# for it
if 'gzip' in headers.get('content-encoding', '') or \
'gzip' in headers.get('transfer-encoding', ''):
self._gzipped = True
@property
def stream(self):
"""Whether response is streamed.
Returns:
bool: `True` if response is streamed.
"""
return self._stream
@stream.setter
def stream(self, value):
if self._content_loaded:
raise RuntimeError("`content` has already been read from "
"this Response.")
self._stream = value
def json(self):
"""Decode response contents as JSON.
:returns: object decoded from JSON
:rtype: list, dict or unicode
"""
return json.loads(self.content, self.encoding or 'utf-8')
@property
def encoding(self):
"""Text encoding of document or ``None``.
:returns: Text encoding if found.
:rtype: str or ``None``
"""
if not self._encoding:
self._encoding = self._get_encoding()
return self._encoding
@property
def content(self):
"""Raw content of response (i.e. bytes).
:returns: Body of HTTP response
:rtype: str
"""
if not self._content:
# Decompress gzipped content
if self._gzipped:
decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
self._content = decoder.decompress(self.raw.read())
else:
self._content = self.raw.read()
self._content_loaded = True
return self._content
@property
def text(self):
"""Unicode-decoded content of response body.
If no encoding can be determined from HTTP headers or the content
itself, the encoded response body will be returned instead.
:returns: Body of HTTP response
:rtype: unicode or str
"""
if self.encoding:
return unicodedata.normalize('NFC', unicode(self.content,
self.encoding))
return self.content
def iter_content(self, chunk_size=4096, decode_unicode=False):
"""Iterate over response data.
.. versionadded:: 1.6
:param chunk_size: Number of bytes to read into memory
:type chunk_size: int
:param decode_unicode: Decode to Unicode using detected encoding
:type decode_unicode: bool
:returns: iterator
"""
if not self.stream:
raise RuntimeError("You cannot call `iter_content` on a "
"Response unless you passed `stream=True`"
" to `get()`/`post()`/`request()`.")
if self._content_loaded:
raise RuntimeError(
"`content` has already been read from this Response.")
def decode_stream(iterator, r):
dec = codecs.getincrementaldecoder(r.encoding)(errors='replace')
for chunk in iterator:
data = dec.decode(chunk)
if data:
yield data
data = dec.decode(b'', final=True)
if data: # pragma: no cover
yield data
def generate():
if self._gzipped:
decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
if self._gzipped:
chunk = decoder.decompress(chunk)
yield chunk
chunks = generate()
if decode_unicode and self.encoding:
chunks = decode_stream(chunks, self)
return chunks
def save_to_path(self, filepath):
"""Save retrieved data to file at ``filepath``.
.. versionadded:: 1.9.6
:param filepath: Path to save retrieved data.
"""
filepath = os.path.abspath(filepath)
dirname = os.path.dirname(filepath)
if not os.path.exists(dirname):
os.makedirs(dirname)
self.stream = True
with open(filepath, 'wb') as fileobj:
for data in self.iter_content():
fileobj.write(data)
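# Streaming sketch (URL and path are placeholders): `stream=True` defers
# reading the body, so it can be consumed chunk-wise or written to disk.
#
#     r = get('https://example.com/big.zip', stream=True)
#     r.save_to_path('/tmp/big.zip')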
def raise_for_status(self):
"""Raise stored error if one occurred.
The error will be an instance of :class:`urllib2.HTTPError`.
"""
if self.error is not None:
raise self.error
return
def _get_encoding(self):
"""Get encoding from HTTP headers or content.
:returns: encoding or `None`
:rtype: unicode or ``None``
"""
headers = self.raw.info()
encoding = None
if headers.getparam('charset'):
encoding = headers.getparam('charset')
# HTTP Content-Type header
for param in headers.getplist():
if param.startswith('charset='):
encoding = param[8:]
break
if not self.stream: # Try sniffing response content
# Encoding declared in document should override HTTP headers
if self.mimetype == 'text/html': # sniff HTML headers
m = re.search(r"""<meta.+charset=["']{0,1}(.+?)["'].*>""",
self.content)
if m:
encoding = m.group(1)
elif ((self.mimetype.startswith('application/')
or self.mimetype.startswith('text/'))
and 'xml' in self.mimetype):
m = re.search(r"""<?xml.+encoding=["'](.+?)["'][^>]*\?>""",
self.content)
if m:
encoding = m.group(1)
# Format defaults
if self.mimetype == 'application/json' and not encoding:
# The default encoding for JSON
encoding = 'utf-8'
elif self.mimetype == 'application/xml' and not encoding:
# The default for 'application/xml'
encoding = 'utf-8'
if encoding:
encoding = encoding.lower()
return encoding
def request(method, url, params=None, data=None, headers=None, cookies=None,
files=None, auth=None, timeout=60, allow_redirects=False,
stream=False):
"""Initiate an HTTP(S) request. Returns :class:`Response` object.
:param method: 'GET' or 'POST'
:type method: unicode
:param url: URL to open
:type url: unicode
:param params: mapping of URL parameters
:type params: dict
:param data: mapping of form data ``{'field_name': 'value'}`` or
:class:`str`
:type data: dict or str
:param headers: HTTP headers
:type headers: dict
:param cookies: cookies to send to server
:type cookies: dict
:param files: files to upload (see below).
:type files: dict
:param auth: username, password
:type auth: tuple
:param timeout: connection timeout limit in seconds
:type timeout: int
:param allow_redirects: follow redirections
:type allow_redirects: bool
:param stream: Stream content instead of fetching it all at once.
:type stream: bool
:returns: Response object
:rtype: :class:`Response`
The ``files`` argument is a dictionary::
{'fieldname' : { 'filename': 'blah.txt',
'content': '<binary data>',
'mimetype': 'text/plain'}
}
* ``fieldname`` is the name of the field in the HTML form.
* ``mimetype`` is optional. If not provided, :mod:`mimetypes` will
be used to guess the mimetype, or ``application/octet-stream``
will be used.
"""
# TODO: cookies
socket.setdefaulttimeout(timeout)
# Default handlers
openers = [urllib2.ProxyHandler(urllib2.getproxies())]
if not allow_redirects:
openers.append(NoRedirectHandler())
if auth is not None: # Add authorisation handler
username, password = auth
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(None, url, username, password)
auth_manager = urllib2.HTTPBasicAuthHandler(password_manager)
openers.append(auth_manager)
# Install our custom chain of openers
opener = urllib2.build_opener(*openers)
urllib2.install_opener(opener)
if not headers:
headers = CaseInsensitiveDictionary()
else:
headers = CaseInsensitiveDictionary(headers)
if 'user-agent' not in headers:
headers['user-agent'] = USER_AGENT
# Accept gzip-encoded content
encodings = [s.strip() for s in
headers.get('accept-encoding', '').split(',')]
if 'gzip' not in encodings:
encodings.append('gzip')
headers['accept-encoding'] = ', '.join(encodings)
if files:
if not data:
data = {}
new_headers, data = encode_multipart_formdata(data, files)
headers.update(new_headers)
elif data and isinstance(data, dict):
data = urllib.urlencode(str_dict(data))
# Make sure everything is encoded text
headers = str_dict(headers)
if isinstance(url, unicode):
url = url.encode('utf-8')
if params: # GET args (POST args are handled in encode_multipart_formdata)
scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
if query: # Combine query string and `params`
url_params = urlparse.parse_qs(query)
# `params` take precedence over URL query string
url_params.update(params)
params = url_params
query = urllib.urlencode(str_dict(params), doseq=True)
url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))
req = Request(url, data, headers, method=method)
return Response(req, stream)
def get(url, params=None, headers=None, cookies=None, auth=None,
timeout=60, allow_redirects=True, stream=False):
"""Initiate a GET request. Arguments as for :func:`request`.
:returns: :class:`Response` instance
"""
return request('GET', url, params, headers=headers, cookies=cookies,
auth=auth, timeout=timeout, allow_redirects=allow_redirects,
stream=stream)
def delete(url, params=None, data=None, headers=None, cookies=None, auth=None,
timeout=60, allow_redirects=True, stream=False):
"""Initiate a DELETE request. Arguments as for :func:`request`.
:returns: :class:`Response` instance
"""
return request('DELETE', url, params, data, headers=headers,
cookies=cookies, auth=auth, timeout=timeout,
allow_redirects=allow_redirects, stream=stream)
def post(url, params=None, data=None, headers=None, cookies=None, files=None,
auth=None, timeout=60, allow_redirects=False, stream=False):
"""Initiate a POST request. Arguments as for :func:`request`.
:returns: :class:`Response` instance
"""
return request('POST', url, params, data, headers, cookies, files, auth,
timeout, allow_redirects, stream)
def put(url, params=None, data=None, headers=None, cookies=None, files=None,
auth=None, timeout=60, allow_redirects=False, stream=False):
"""Initiate a PUT request. Arguments as for :func:`request`.
:returns: :class:`Response` instance
"""
return request('PUT', url, params, data, headers, cookies, files, auth,
timeout, allow_redirects, stream)
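# Module-level usage sketch (URL and parameters are placeholders):
#
#     r = get('https://api.example.com/search', params={'q': 'alfred'})
#     r.raise_for_status()          # re-raise any stored HTTPError
#     results = r.json()            # or r.text / r.content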
def encode_multipart_formdata(fields, files):
"""Encode form data (``fields``) and ``files`` for POST request.
:param fields: mapping of ``{name : value}`` pairs for normal form fields.
:type fields: dict
:param files: dictionary of fieldnames/files elements for file data.
See below for details.
:type files: dict of :class:`dict`
:returns: ``(headers, body)`` ``headers`` is a
:class:`dict` of HTTP headers
:rtype: 2-tuple ``(dict, str)``
The ``files`` argument is a dictionary::
{'fieldname' : { 'filename': 'blah.txt',
'content': '<binary data>',
'mimetype': 'text/plain'}
}
- ``fieldname`` is the name of the field in the HTML form.
- ``mimetype`` is optional. If not provided, :mod:`mimetypes` will
be used to guess the mimetype, or ``application/octet-stream``
will be used.
"""
def get_content_type(filename):
"""Return or guess mimetype of ``filename``.
:param filename: filename of file
:type filename: unicode/str
:returns: mime-type, e.g. ``text/html``
:rtype: str
"""
return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
boundary = '-----' + ''.join(random.choice(BOUNDARY_CHARS)
for i in range(30))
CRLF = '\r\n'
output = []
# Normal form fields
for (name, value) in fields.items():
if isinstance(name, unicode):
name = name.encode('utf-8')
if isinstance(value, unicode):
value = value.encode('utf-8')
output.append('--' + boundary)
output.append('Content-Disposition: form-data; name="%s"' % name)
output.append('')
output.append(value)
# Files to upload
for name, d in files.items():
filename = d[u'filename']
content = d[u'content']
if u'mimetype' in d:
mimetype = d[u'mimetype']
else:
mimetype = get_content_type(filename)
if isinstance(name, unicode):
name = name.encode('utf-8')
if isinstance(filename, unicode):
filename = filename.encode('utf-8')
if isinstance(mimetype, unicode):
mimetype = mimetype.encode('utf-8')
output.append('--' + boundary)
output.append('Content-Disposition: form-data; '
'name="%s"; filename="%s"' % (name, filename))
output.append('Content-Type: %s' % mimetype)
output.append('')
output.append(content)
output.append('--' + boundary + '--')
output.append('')
body = CRLF.join(output)
headers = {
'Content-Type': 'multipart/form-data; boundary=%s' % boundary,
'Content-Length': str(len(body)),
}
return (headers, body)
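# Upload sketch showing the `files` mapping this function expects
# (field and file names are invented; `post()` calls it internally):
#
#     files = {'report': {'filename': 'report.txt',
#                         'content': open('report.txt', 'rb').read(),
#                         'mimetype': 'text/plain'}}   # mimetype optional
#     r = post('https://example.com/upload', data={'user': 'jean'},
#              files=files)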
|
Alfred-Workflow
|
/Alfred-Workflow-1.40.0.tar.gz/Alfred-Workflow-1.40.0/workflow/web.py
|
web.py
|
from __future__ import print_function, unicode_literals
from collections import defaultdict
from functools import total_ordering
import json
import os
import tempfile
import re
import subprocess
import workflow
import web
# __all__ = []
RELEASES_BASE = 'https://api.github.com/repos/{}/releases'
match_workflow = re.compile(r'\.alfred(\d+)?workflow$').search
_wf = None
def wf():
"""Lazy `Workflow` object."""
global _wf
if _wf is None:
_wf = workflow.Workflow()
return _wf
@total_ordering
class Download(object):
"""A workflow file that is available for download.
.. versionadded:: 1.37
Attributes:
url (str): URL of workflow file.
filename (str): Filename of workflow file.
version (Version): Semantic version of workflow.
prerelease (bool): Whether version is a pre-release.
alfred_version (Version): Minimum compatible version
of Alfred.
"""
@classmethod
def from_dict(cls, d):
"""Create a `Download` from a `dict`."""
return cls(url=d['url'], filename=d['filename'],
version=Version(d['version']),
prerelease=d['prerelease'])
@classmethod
def from_releases(cls, js):
"""Extract downloads from GitHub releases.
Searches releases with semantic tags for assets with
file extension .alfredworkflow or .alfredXworkflow where
X is a number.
Files are returned sorted by latest version first. Any
releases containing multiple files with the same (workflow)
extension are rejected as ambiguous.
Args:
js (str): JSON response from GitHub's releases endpoint.
Returns:
list: Sequence of `Download`.
"""
releases = json.loads(js)
downloads = []
for release in releases:
tag = release['tag_name']
dupes = defaultdict(int)
try:
version = Version(tag)
except ValueError as err:
wf().logger.debug('ignored release: bad version "%s": %s',
tag, err)
continue
dls = []
for asset in release.get('assets', []):
url = asset.get('browser_download_url')
filename = os.path.basename(url)
m = match_workflow(filename)
if not m:
wf().logger.debug('unwanted file: %s', filename)
continue
ext = m.group(0)
dupes[ext] = dupes[ext] + 1
dls.append(Download(url, filename, version,
release['prerelease']))
valid = True
for ext, n in dupes.items():
if n > 1:
wf().logger.debug('ignored release "%s": multiple assets '
'with extension "%s"', tag, ext)
valid = False
break
if valid:
downloads.extend(dls)
downloads.sort(reverse=True)
return downloads
def __init__(self, url, filename, version, prerelease=False):
"""Create a new Download.
Args:
url (str): URL of workflow file.
filename (str): Filename of workflow file.
version (Version): Version of workflow.
prerelease (bool, optional): Whether version is
pre-release. Defaults to False.
"""
if isinstance(version, basestring):
version = Version(version)
self.url = url
self.filename = filename
self.version = version
self.prerelease = prerelease
@property
def alfred_version(self):
"""Minimum Alfred version based on filename extension."""
m = match_workflow(self.filename)
if not m or not m.group(1):
return Version('0')
return Version(m.group(1))
@property
def dict(self):
"""Convert `Download` to `dict`."""
return dict(url=self.url, filename=self.filename,
version=str(self.version), prerelease=self.prerelease)
def __str__(self):
"""Format `Download` for printing."""
u = ('Download(url={dl.url!r}, '
'filename={dl.filename!r}, '
'version={dl.version!r}, '
'prerelease={dl.prerelease!r})'.format(dl=self))
return u.encode('utf-8')
def __repr__(self):
"""Code-like representation of `Download`."""
return str(self)
def __eq__(self, other):
"""Compare Downloads based on version numbers."""
if self.url != other.url \
or self.filename != other.filename \
or self.version != other.version \
or self.prerelease != other.prerelease:
return False
return True
def __ne__(self, other):
"""Compare Downloads based on version numbers."""
return not self.__eq__(other)
def __lt__(self, other):
"""Compare Downloads based on version numbers."""
if self.version != other.version:
return self.version < other.version
return self.alfred_version < other.alfred_version
class Version(object):
"""Mostly semantic versioning.
The main difference to proper :ref:`semantic versioning <semver>`
is that this implementation doesn't require a minor or patch version.
Version strings may also be prefixed with "v", e.g.:
>>> v = Version('v1.1.1')
>>> v.tuple
(1, 1, 1, '')
>>> v = Version('2.0')
>>> v.tuple
(2, 0, 0, '')
>>> Version('3.1-beta').tuple
(3, 1, 0, 'beta')
>>> Version('1.0.1') > Version('0.0.1')
True
"""
#: Match version and pre-release/build information in version strings
match_version = re.compile(r'([0-9][0-9\.]*)(.+)?').match
def __init__(self, vstr):
"""Create new `Version` object.
Args:
vstr (basestring): Semantic version string.
"""
if not vstr:
raise ValueError('invalid version number: {!r}'.format(vstr))
self.vstr = vstr
self.major = 0
self.minor = 0
self.patch = 0
self.suffix = ''
self.build = ''
self._parse(vstr)
def _parse(self, vstr):
if vstr.startswith('v'):
m = self.match_version(vstr[1:])
else:
m = self.match_version(vstr)
if not m:
raise ValueError('invalid version number: ' + vstr)
version, suffix = m.groups()
parts = self._parse_dotted_string(version)
self.major = parts.pop(0)
        if parts:
            self.minor = parts.pop(0)
        if parts:
            self.patch = parts.pop(0)
        if parts:
            raise ValueError('version number too long: ' + vstr)
if suffix:
# Build info
idx = suffix.find('+')
if idx > -1:
self.build = suffix[idx+1:]
suffix = suffix[:idx]
if suffix:
if not suffix.startswith('-'):
raise ValueError(
'suffix must start with - : ' + suffix)
self.suffix = suffix[1:]
def _parse_dotted_string(self, s):
"""Parse string ``s`` into list of ints and strings."""
parsed = []
parts = s.split('.')
for p in parts:
if p.isdigit():
p = int(p)
parsed.append(p)
return parsed
@property
def tuple(self):
"""Version number as a tuple of major, minor, patch, pre-release."""
return (self.major, self.minor, self.patch, self.suffix)
def __lt__(self, other):
"""Implement comparison."""
if not isinstance(other, Version):
raise ValueError('not a Version instance: {0!r}'.format(other))
t = self.tuple[:3]
o = other.tuple[:3]
if t < o:
return True
if t == o: # We need to compare suffixes
if self.suffix and not other.suffix:
return True
if other.suffix and not self.suffix:
return False
return self._parse_dotted_string(self.suffix) \
< self._parse_dotted_string(other.suffix)
# t > o
return False
def __eq__(self, other):
"""Implement comparison."""
if not isinstance(other, Version):
raise ValueError('not a Version instance: {0!r}'.format(other))
return self.tuple == other.tuple
def __ne__(self, other):
"""Implement comparison."""
return not self.__eq__(other)
def __gt__(self, other):
"""Implement comparison."""
if not isinstance(other, Version):
raise ValueError('not a Version instance: {0!r}'.format(other))
return other.__lt__(self)
def __le__(self, other):
"""Implement comparison."""
if not isinstance(other, Version):
raise ValueError('not a Version instance: {0!r}'.format(other))
return not other.__lt__(self)
def __ge__(self, other):
"""Implement comparison."""
return not self.__lt__(other)
def __str__(self):
"""Return semantic version string."""
vstr = '{0}.{1}.{2}'.format(self.major, self.minor, self.patch)
if self.suffix:
vstr = '{0}-{1}'.format(vstr, self.suffix)
if self.build:
vstr = '{0}+{1}'.format(vstr, self.build)
return vstr
def __repr__(self):
"""Return 'code' representation of `Version`."""
return "Version('{0}')".format(str(self))
def retrieve_download(dl):
"""Saves a download to a temporary file and returns path.
.. versionadded: 1.37
Args:
url (unicode): URL to .alfredworkflow file in GitHub repo
Returns:
unicode: path to downloaded file
"""
if not match_workflow(dl.filename):
raise ValueError('attachment not a workflow: ' + dl.filename)
path = os.path.join(tempfile.gettempdir(), dl.filename)
wf().logger.debug('downloading update from '
'%r to %r ...', dl.url, path)
r = web.get(dl.url)
r.raise_for_status()
r.save_to_path(path)
return path
def build_api_url(repo):
"""Generate releases URL from GitHub repo.
Args:
repo (unicode): Repo name in form ``username/repo``
Returns:
unicode: URL to the API endpoint for the repo's releases
"""
if len(repo.split('/')) != 2:
raise ValueError('invalid GitHub repo: {!r}'.format(repo))
return RELEASES_BASE.format(repo)
def get_downloads(repo):
"""Load available ``Download``s for GitHub repo.
    .. versionadded:: 1.37
Args:
repo (unicode): GitHub repo to load releases for.
Returns:
list: Sequence of `Download` contained in GitHub releases.
"""
url = build_api_url(repo)
def _fetch():
wf().logger.info('retrieving releases for %r ...', repo)
r = web.get(url)
r.raise_for_status()
return r.content
key = 'github-releases-' + repo.replace('/', '-')
js = wf().cached_data(key, _fetch, max_age=60)
return Download.from_releases(js)
def latest_download(dls, alfred_version=None, prereleases=False):
"""Return newest `Download`."""
alfred_version = alfred_version or os.getenv('alfred_version')
version = None
if alfred_version:
version = Version(alfred_version)
dls.sort(reverse=True)
for dl in dls:
if dl.prerelease and not prereleases:
wf().logger.debug('ignored prerelease: %s', dl.version)
continue
if version and dl.alfred_version > version:
wf().logger.debug('ignored incompatible (%s > %s): %s',
dl.alfred_version, version, dl.filename)
continue
wf().logger.debug('latest version: %s (%s)', dl.version, dl.filename)
return dl
return None
def check_update(repo, current_version, prereleases=False,
alfred_version=None):
"""Check whether a newer release is available on GitHub.
Args:
repo (unicode): ``username/repo`` for workflow's GitHub repo
current_version (unicode): the currently installed version of the
workflow. :ref:`Semantic versioning <semver>` is required.
prereleases (bool): Whether to include pre-releases.
alfred_version (unicode): version of currently-running Alfred.
            If empty, defaults to the ``$alfred_version`` environment variable.
Returns:
bool: ``True`` if an update is available, else ``False``
If an update is available, its version number and download URL will
be cached.
"""
key = '__workflow_latest_version'
# data stored when no update is available
no_update = {
'available': False,
'download': None,
'version': None,
}
current = Version(current_version)
dls = get_downloads(repo)
if not len(dls):
wf().logger.warning('no valid downloads for %s', repo)
wf().cache_data(key, no_update)
return False
wf().logger.info('%d download(s) for %s', len(dls), repo)
dl = latest_download(dls, alfred_version, prereleases)
if not dl:
wf().logger.warning('no compatible downloads for %s', repo)
wf().cache_data(key, no_update)
return False
wf().logger.debug('latest=%r, installed=%r', dl.version, current)
if dl.version > current:
wf().cache_data(key, {
'version': str(dl.version),
'download': dl.dict,
'available': True,
})
return True
wf().cache_data(key, no_update)
return False
def install_update():
"""If a newer release is available, download and install it.
:returns: ``True`` if an update is installed, else ``False``
"""
key = '__workflow_latest_version'
# data stored when no update is available
no_update = {
'available': False,
'download': None,
'version': None,
}
status = wf().cached_data(key, max_age=0)
if not status or not status.get('available'):
wf().logger.info('no update available')
return False
dl = status.get('download')
if not dl:
wf().logger.info('no download information')
return False
path = retrieve_download(Download.from_dict(dl))
wf().logger.info('installing updated workflow ...')
subprocess.call(['open', path]) # nosec
wf().cache_data(key, no_update)
return True
if __name__ == '__main__': # pragma: nocover
import sys
prereleases = False
def show_help(status=0):
"""Print help message."""
print('usage: update.py (check|install) '
'[--prereleases] <repo> <version>')
sys.exit(status)
argv = sys.argv[:]
if '-h' in argv or '--help' in argv:
show_help()
if '--prereleases' in argv:
argv.remove('--prereleases')
prereleases = True
if len(argv) != 4:
show_help(1)
action = argv[1]
repo = argv[2]
version = argv[3]
try:
if action == 'check':
check_update(repo, version, prereleases)
elif action == 'install':
install_update()
else:
show_help(1)
except Exception as err: # ensure traceback is in log file
wf().logger.exception(err)
raise err
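# Example invocation (sketch; the repo and version below are placeholders):
#   python update.py check someuser/someworkflow 1.0
#   python update.py install someuser/someworkflow 1.0 --prereleases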
|
Alfred-Workflow
|
/Alfred-Workflow-1.40.0.tar.gz/Alfred-Workflow-1.40.0/workflow/update.py
|
update.py
|
import json
import os
import shutil
import sys
import typing
from .db import NfsJsonDict, NfsJsonList, NfsJsonSet
from .environment import get_environment_info
from .fingerprint import fingerprint
class BenchmarkDb:
def __init__(self, path) -> None:
self.path = path
self._create_or_check_info_file()
self._arg_fingerprints = NfsJsonSet(os.path.join(path, "arg_fingerprints"))
self._data = NfsJsonList(os.path.join(path, "results"))
self._env_data = NfsJsonDict(os.path.join(path, "env_info"))
def _create_or_check_info_file(self):
info_path = os.path.join(self.path, "algbench.json")
if os.path.exists(info_path):
with open(info_path) as f:
info = json.load(f)
if info.get("version", "v0.0.0")[1] == "0":
msg = "Incompatible database of old version of AlgBench."
raise RuntimeError(msg)
else:
os.makedirs(self.path, exist_ok=True)
with open(info_path, "w") as f:
json.dump({"version": "v1.0.0"}, f)
def contains_fingerprint(self, fingerprint):
return fingerprint in self._arg_fingerprints
def insert(self, entry: typing.Dict):
# extract data from entry
env_fingp = entry["env_fingerprint"]
env_data = entry["env"]
arg_fingerprint = entry["args_fingerprint"]
result = {k: v for k, v in entry.items() if k != "env"}
# write into database
self._arg_fingerprints.add(arg_fingerprint)
self._env_data[env_fingp] = env_data
self._data.append(result)
def add(self, arg_fingerprint, arg_data, result):
argv = (" ".join(sys.argv) if sys.argv else "",)
self._arg_fingerprints.add(arg_fingerprint)
env_data = get_environment_info()
env_fingp = fingerprint(env_data)
self._env_data[env_fingp] = env_data
result["env_fingerprint"] = env_fingp
result["args_fingerprint"] = arg_fingerprint
result["parameters"] = arg_data
result["argv"] = argv
self._data.append(result)
def compress(self):
self._arg_fingerprints.compress()
self._data.compress()
self._env_data.compress()
def delete(self):
self._arg_fingerprints.delete()
self._data.delete()
self._env_data.delete()
shutil.rmtree(self.path)
def clear(self):
self._arg_fingerprints.clear()
self._data.clear()
self._env_data.clear()
def get_env_info(self, env_fingerprint):
return self._env_data[env_fingerprint]
def __iter__(self):
for entry in self._data:
entry = entry.copy()
try:
entry["env"] = self.get_env_info(entry["env_fingerprint"])
yield entry
except KeyError:
pass
def front(self) -> typing.Optional[typing.Dict]:
try:
return next(self.__iter__())
except StopIteration:
return None
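# Minimal usage sketch (path and values are illustrative, not from the
# package's docs): store one result and read it back.
#
#   db = BenchmarkDb("./my_benchmark_db")
#   db.add(arg_fingerprint="abc123",
#          arg_data={"n": 10},
#          result={"result": 42})
#   print(db.front())  # first readable entry, with "env" data attached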
|
AlgBench
|
/AlgBench-2.2.0-py3-none-any.whl/algbench/benchmark_db.py
|
benchmark_db.py
|
import datetime
import inspect
import logging
import sys
import traceback
import typing
from contextlib import ExitStack, redirect_stderr, redirect_stdout
import yaml
from ._stream_utils import NotSavingIO, PrintingStringIO, StreamWithTime
from .benchmark_db import BenchmarkDb
from .db.json_serializer import to_json
from .fingerprint import fingerprint
from .log_capture import JsonLogCapture, JsonLogHandler
from .utils import Timer
class Benchmark:
"""
    This is the heart of the library. It allows you to run, save, and load
    a benchmark.
    The function `add` will run a configuration if it is not
    already in the database. You can also split this into `exists` and
    `run`, which can be useful if you want to distribute the execution.
The following functions are thread-safe:
- exists
- run
- add
- insert
- front
- capture_logger
- unlink_logger
- __iter__
Don't call any of the other functions while the benchmark is
running. It could lead to data loss.
"""
def __init__(
self,
path: str,
save_output: bool = True,
hide_output: bool = True,
save_output_with_time: bool = True,
) -> None:
"""
Just specify the path of where to put the
database and everything else happens magically.
Make sure not to use the same path for different
databases, as they will get mixed.
:param path: The path to the database.
:param save_output: If true, all output (stdout and stderr) will be
saved. If set to false, the output will be discarded. This is
useful if you have a lot of output and don't want to waste disk
space. However, you will not be able to see the output of the
algorithm afterwards. Note that the output can only be saved if
            the code acquires the Python sys.stdout and sys.stderr streams
during the execution, as the corresponding streams are replaced
by the benchmark. Normal ``print`` statements do so, but
``logging.StreamHandler`` does not. For the latter, use
``Benchmark.capture_logger``.
:param hide_output: If true, all output (stdout and stderr) will be
hidden. This is useful if you have a lot of output and don't want
to clutter your console. However, you will not be able to see the
            output of the algorithm while it is running. Code that acquired handles
            to the Python sys.stdout and sys.stderr streams earlier will still be
            able to print to the console, as it circumvents the replacement.
:param save_output_with_time: If true, all output (stdout and stderr)
will be saved with the time it was written. This gives you more
insights on the runtime of the algorithm, but also increases the
size of the database. This option is ignored if `save_output` is
set to false.
"""
self._db = BenchmarkDb(path)
self._save_output = save_output
self._hide_output = hide_output
self._save_output_with_time = save_output_with_time
self._log_captures = {}
def capture_logger(self, logger_name: str, level=logging.NOTSET):
"""
Capture the logs of a logger of the Python logging module.
This allows you to precisely control which logs you want to
        capture. Prefer the logging module over plain stdout/stderr, as
        just using ``print`` will not allow you to control the output of
        sub-algorithms.
        The logging module also allows you to search more easily for
specific log entries, if used correctly. However, it is
more expensive than just using ``print`` as more metadata
        is created. Don't overuse it; log only the important events
        in the algorithm.
:param logger_name: The name of the logger to capture.
        :param level: The level of the logger to capture. The logger
            will automatically be set to this level while capturing, but
            will be reset afterwards. NOTSET will not change the level.
:return: None
"""
self._log_captures[logger_name] = level
def unlink_logger(self, logger_name: str):
"""
Stop capturing the logs of a logger of the Python logging module
while the benchmark is running.
"""
del self._log_captures[logger_name]
def _get_arg_data(self, func, args, kwargs):
sig = inspect.signature(func)
func_args = {
k: v.default
for k, v in sig.parameters.items()
if v.default is not inspect.Parameter.empty
}
func_args.update(sig.bind(*args, **kwargs).arguments)
data = {
"func": func.__name__,
"args": {
key: value
for key, value in func_args.items()
if not key.startswith("_")
},
}
return fingerprint(data), to_json(data)
def exists(self, func: typing.Callable, *args, **kwargs) -> bool:
"""
Use this function to check if an entry already exist and thus
does not have to be run again. If you want to have multiple
samples, add a sample index argument.
        Caveat: This function may have false negatives, i.e., it may say an
        entry does not exist even though it does (only for fresh data).
"""
fingp, _ = self._get_arg_data(func, args, kwargs)
return self._db.contains_fingerprint(fingp)
def _get_stream_obj(self, forward_stream):
if not self._save_output:
# This wrapper just adds a ``getvalue`` method to the stream,
            # so it can be used as a drop-in for StringIO.
return NotSavingIO(forward_stream)
if self._save_output_with_time:
            # StreamWithTime is a wrapper around StringIO.
# It stores the time of each line.
# getvalue() returns a list of tuples (time, line).
return StreamWithTime(forward_stream)
else:
return PrintingStringIO(forward_stream)
def run(self, func: typing.Callable, *args, **kwargs):
"""
Will add the function call with the arguments
to the benchmark.
The output of stdout and stderr will be captured and stored,
but not printed to the console.
"""
fingp, arg_data = self._get_arg_data(func, args, kwargs)
try:
stdout = self._get_stream_obj(sys.stdout if not self._hide_output else None)
stderr = self._get_stream_obj(sys.stderr if not self._hide_output else None)
with ExitStack() as logging_stack:
log_handler = JsonLogHandler()
for logger_name, level in self._log_captures.items():
logging_stack.enter_context(
JsonLogCapture(logger_name, level, log_handler)
)
with redirect_stdout(stdout), redirect_stderr(stderr):
timestamp = datetime.datetime.now().isoformat()
timer = Timer()
result = func(*args, **kwargs)
runtime = timer.time()
self._db.add(
arg_fingerprint=fingp,
arg_data=arg_data,
result={
"result": result,
"timestamp": timestamp,
"runtime": runtime,
"stdout": stdout.getvalue(),
"stderr": stderr.getvalue(),
"logging": log_handler.get_entries(),
},
)
print(".", end="") # flake8: noqa T201
except Exception as e:
print() # flake8: noqa T201
print("Exception while running benchmark.") # flake8: noqa T201
print("=====================================") # flake8: noqa T201
print(yaml.dump(arg_data)) # flake8: noqa T201
print("-------------------------------------") # flake8: noqa T201
print("ERROR:", e, f"({type(e)})") # flake8: noqa T201
print(traceback.format_exc()) # flake8: noqa T201
print("-------------------------------------") # flake8: noqa T201
raise
def add(self, func: typing.Callable, *args, **kwargs):
"""
Will add the function call with the arguments
to the benchmark if not yet contained.
Combination of `check` and `run`.
Will only call `run` if the arguments are not
yet in the benchmark.
"""
if not self.exists(func, *args, **kwargs):
self.run(func, *args, **kwargs)
def insert(self, entry: typing.Dict):
"""
Insert a raw entry, as returned by `__iter__` or `front`.
"""
self._db.insert(entry)
def compress(self):
"""
Compress the data of the benchmark to take less disk space.
NOT THREAD-SAFE!
"""
self._db.compress()
def repair(self):
"""
Repairs the benchmark in case it has some broken entries.
NOT THREAD-SAFE!
"""
self.delete_if(lambda data: False)
def __iter__(self) -> typing.Generator[typing.Dict, None, None]:
"""
Iterate over all entries in the benchmark.
        Use `front` to get a preview of what an entry looks like.
"""
for entry in self._db:
yield entry.copy()
def delete(self):
"""
Delete the benchmark and all its files. Do not use it afterwards,
there are no files left to write results into.
        If you just want to delete the content, use `clear`.
NOT THREAD-SAFE!
"""
self._db.delete()
def front(self) -> typing.Optional[typing.Dict]:
"""
Return the first entry of the benchmark.
Useful for checking its content.
"""
return self._db.front()
def clear(self):
"""
Clears all entries of the benchmark, without deleting
the benchmark itself. You can continue to use it afterwards.
NOT THREAD-SAFE!
"""
self._db.clear()
def delete_if(self, condition: typing.Callable[[typing.Dict], bool]):
"""
Delete entries if a specific condition is met.
        This is currently inefficient, as a full copy of the benchmark
        is always created.
        Use `front` to get a preview of what an entry passed to the
        condition looks like.
NOT THREAD-SAFE!
"""
import tempfile
with tempfile.TemporaryDirectory() as tmpdirname:
benchmark_copy = Benchmark(tmpdirname)
for entry in self:
if not condition(entry):
benchmark_copy.insert(entry)
self.clear()
for entry in benchmark_copy:
self.insert(entry)
self.compress()
benchmark_copy.delete()
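# Minimal usage sketch (path and function are illustrative): run a study
# once per argument combination and skip configurations already stored.
#
#   def f(x):
#       print("running f", x)
#       return {"square": x * x}
#
#   benchmark = Benchmark("./my_benchmark")
#   for x in range(3):
#       benchmark.add(f, x)      # re-running the loop adds nothing new
#   benchmark.compress()         # NOT thread-safe; do this after all runs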
|
AlgBench
|
/AlgBench-2.2.0-py3-none-any.whl/algbench/benchmark.py
|
benchmark.py
|
import logging
from typing import Optional
from .db.json_serializer import to_json
from .utils import Timer
class JsonLogHandler(logging.Handler):
"""
A logging handler that stores log entries in a list of JSON compatible
dictionaries.
"""
def __init__(self, level=logging.NOTSET) -> None:
"""
:param level: The level of the logger to catch.
"""
super().__init__(level)
self._log = []
self._timer = Timer()
def emit(self, record: logging.LogRecord) -> None:
data = {}
data.update(record.__dict__)
data["runtime"] = self._timer.time()
self._log.append(to_json(data))
def reset(self):
self._timer.reset()
self._log = []
def get_entries(self) -> list:
return self._log
class JsonLogCapture:
"""
A context manager that captures logs and returns them as a list of JSON
"""
def __init__(
self,
logger_name: str,
level=logging.NOTSET,
handler: Optional[JsonLogHandler] = None,
) -> None:
"""
:param logger_name: The name of the logger to catch.
:param level: The level of the logger to catch.
"""
self._logger = logging.getLogger(logger_name)
self._level = level
self._prior_level = self._logger.getEffectiveLevel()
self._json_log: JsonLogHandler = handler if handler else JsonLogHandler(level)
def __enter__(self):
self._json_log.reset()
self._logger.addHandler(self._json_log)
if self._level:
self._prior_level = self._logger.getEffectiveLevel()
self._logger.setLevel(self._level)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._logger.removeHandler(self._json_log)
self._json_log.close()
if self._level:
self._logger.setLevel(self._prior_level)
def get_entries(self) -> list:
"""
Returns the log entries as a list of JSON compatible dictionaries.
"""
return self._json_log.get_entries()
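# Usage sketch (the logger name is illustrative): capture a logger's
# records as JSON compatible dictionaries while a block runs.
#
#   with JsonLogCapture("my.algorithm", level=logging.INFO) as cap:
#       logging.getLogger("my.algorithm").info("started")
#   print(cap.get_entries())  # e.g. [{'msg': 'started', ..., 'runtime': ...}]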
|
AlgBench
|
/AlgBench-2.2.0-py3-none-any.whl/algbench/log_capture.py
|
log_capture.py
|
import typing
import pandas as pd
from .benchmark import Benchmark
def _describe_data(
data: typing.Dict, indentation: int, max_length: int, max_depth: int
):
if max_depth == 0:
return
for i, (key, val) in enumerate(data.items()):
if i >= max_length:
if i < len(data) - 1:
print(indentation * "|", "...") # flake8: noqa T201
return
if isinstance(val, dict):
if max_depth == 1:
print(indentation * "|", f"{key}: ...") # flake8: noqa T201
else:
print(indentation * "|", f"{key}:") # flake8: noqa T201
_describe_data(val, indentation + 1, max_length, max_depth - 1)
else:
val_text = str(val)
if len(val_text) > 80:
val_text = val_text[:77] + "..."
print(indentation * "|", f"{key}: {val_text}") # flake8: noqa T201
def describe(path: str):
"""
Describe the benchmark by printing the first entry.
"""
print("An entry in the database can look like this:") # flake8: noqa T201
print("_____________________________________________") # flake8: noqa T201
entry = Benchmark(path).front()
if not entry:
return
_describe_data(entry, 0, 20, 5)
print("______________________________________________") # flake8: noqa T201
print( # flake8: noqa T201
"Note that this is only based on the first entry,"
" other entries could differ."
)
def read_as_pandas(
path: str, row_creator: typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]
) -> pd.DataFrame:
"""
Read the benchmark as pandas table.
    For this, you have to tell the function which data should
    go into which column. If you want to skip an entry, return None (or an empty dict) from the row_creator.
An example could look like this:
.. code-block:: python
t = read_as_pandas(
"./03_benchmark_data/",
lambda result: {
"instance": result["parameters"]["args"]["instance_name"],
"strategy": result["parameters"]["args"]["alg_params"]["strategy"],
"interchange": result["parameters"]["args"]["alg_params"].get(
"interchange", None
),
"colors": result["result"]["n_colors"],
"runtime": result["runtime"],
"num_vertices": result["result"]["num_vertices"],
"num_edges": result["result"]["num_edges"],
},
)
:param path: Path to the benchmark
:param row_creator: Function that creates a row from an entry
:return: Pandas DataFrame
"""
data: typing.Dict[str, list] = {}
n = 0
benchmark = Benchmark(path)
for entry in benchmark:
row = row_creator(entry)
if not row:
# Skip entry
continue
        for key, value in row.items():
            if key not in data:
                # back-fill a column that first appears after n rows
                data[key] = n * [None]
data[key].append(value)
n += 1
# Fill up missing entries with None
for column in data:
if column not in row:
data[column].append(None)
return pd.DataFrame(data=data)
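# Usage sketch (path is illustrative): print the structure of the first
# entry of a benchmark, then flatten one field into a DataFrame.
#
#   describe("./my_benchmark")
#   df = read_as_pandas("./my_benchmark",
#                       lambda entry: {"runtime": entry["runtime"]})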
|
AlgBench
|
/AlgBench-2.2.0-py3-none-any.whl/algbench/pandas.py
|
pandas.py
|
import datetime
import json
import logging
import os
import os.path
import pathlib
import random
import shutil
import socket
import typing
import zipfile
from zipfile import ZipFile
from .json_serializer import to_json
_log = logging.getLogger("AlgBench")
class NfsJsonList:
"""
A simple database to dump data (dictionaries) into. Should be reasonably threadsafe
even for slurm pools with NFS.
"""
def __init__(self, path: typing.Union[str, pathlib.Path]):
self.path: typing.Union[str, pathlib.Path] = path
if not os.path.exists(path):
# Could fail in very few unlucky cases on an NFS (parallel creations)
os.makedirs(path, exist_ok=True)
_log.info(f"Created new database '{path}'.")
if os.path.isfile(path):
msg = f"Cannot create database {path} because there exists an equally named file."
raise RuntimeError(msg)
self._subfile_path: typing.Union[str, pathlib.Path] = self._get_unique_name()
self._cache: typing.List = []
def _get_unique_name(self, _tries=3):
"""
Generate a unique file name to prevent collisions of parallel processes.
"""
if _tries <= 0:
msg = "Could not generate a unique file name. This is odd."
raise RuntimeError(msg)
hostname = socket.gethostname()
timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M")
rand = random.randint(0, 10000)
name = f"{timestamp}-{hostname}-{rand}.data"
        # check for collisions inside the database directory, not the CWD
        if os.path.exists(os.path.join(self.path, name)):
            return self._get_unique_name(_tries=_tries - 1)
return name
def compress(self, compression=zipfile.ZIP_LZMA, compresslevel=None):
"""
Warning: This may not be threadsafe! If you want to extract all data to
a single file, just use 'read' and dump the output into a single json.
"""
compr_path = os.path.join(self.path, "_compressed.zip")
with ZipFile(
compr_path, "a", compression=compression, compresslevel=compresslevel
) as z:
for file_name in os.listdir(self.path):
path = os.path.join(self.path, file_name)
if not os.path.isfile(path) or not path.endswith(".data"):
continue
if os.path.getsize(path) <= 0:
_log.warning(f"Skipping '{path}' due to zero size.")
continue
_log.info(f"Compressing '{file_name}' of size {os.path.getsize(path)}.")
z.write(path, file_name)
os.remove(path)
_log.info(f"Compressed database has size {os.path.getsize(compr_path)}.")
def extend(self, entries: typing.List, flush=True):
_log.info(f"Adding {len(entries)} items to database.")
serialized_data = [to_json(e) for e in entries]
self._cache += serialized_data
if flush:
self.flush()
return serialized_data
def append(self, entry, flush=True):
return self.extend([entry], flush)[0]
def flush(self):
if not self._cache:
return
path = os.path.join(self.path, self._subfile_path)
with open(path, "a") as f:
for data in self._cache:
data = to_json(data)
f.write(json.dumps(data) + "\n")
_log.info(f"Wrote {len(self._cache)} entries to disk.")
if os.path.getsize(path) <= 0:
msg = "Could not write to disk. Resulting file has zero size."
raise RuntimeError(msg)
if not os.path.isfile(path):
msg = "Could not write to disk for unknown reasons."
raise RuntimeError(msg)
self._cache.clear()
def iter_cache(self):
yield from self._cache
def iter_compressed(self):
compr_path = os.path.join(self.path, "_compressed.zip")
if os.path.exists(compr_path):
with ZipFile(compr_path, "r") as z:
for filename in z.filelist:
with z.open(filename, "r") as f:
for line in f.readlines():
try:
entry = json.loads(line)
yield entry
except Exception:
# Just continue. Probably a synchronization
# thing of the NFS.
_log.warning(
f'Could not load "{line}" in "{compr_path}".'
)
def iter_uncompressed(self):
# load uncompressed data
for fp in os.listdir(self.path):
path = os.path.join(self.path, fp)
if not os.path.isfile(path) or not path.endswith(".data"):
continue
with open(path) as f:
for entry in f.readlines():
try:
entry_ = json.loads(entry)
yield entry_
except Exception:
# Just continue. Probably a synchronization thing of the NFS.
_log.warning(f'Could not load "{entry}" in "{path}".')
def __iter__(self):
for entry in self.iter_compressed():
yield entry
for entry in self.iter_uncompressed():
yield entry
for entry in self.iter_cache():
yield entry
def load(self) -> typing.List:
return list(self)
def clear(self):
"""
Clear database (cache and disk). Note that remaining data in the
cache of other nodes may still be written.
"""
# cache
self._cache.clear()
# compressed
compr_path = os.path.join(self.path, "_compressed.zip")
if os.path.exists(compr_path):
os.remove(compr_path)
# remaining .data files
for fp in os.listdir(self.path):
path = os.path.join(self.path, fp)
if not os.path.isfile(path) or not str(path).endswith(".data"):
continue
os.remove(path)
def __del__(self):
self.flush()
def delete(self):
self._cache.clear()
shutil.rmtree(self.path)
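# Usage sketch (path is illustrative): append entries from parallel
# workers into per-process files, then read everything back merged.
#
#   db = NfsJsonList("./results_db")
#   db.append({"run": 1, "ok": True})
#   print(db.load())  # compressed + uncompressed + cached entries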
|
AlgBench
|
/AlgBench-2.2.0-py3-none-any.whl/algbench/db/nfs_json_list.py
|
nfs_json_list.py
|
[](https://badge.fury.io/py/AlgDiff)
# AlgDiff
AlgDiff: A Python class that provides all necessary tools for the design, analysis, and discretization of algebraic differentiators. An interface to Matlab is also provided.
This implementation was released as part of the survey [[1]](#1). A tutorial for algebraic differentiators and this package can be found in [[6]](#6).
The toolbox is licensed under the BSD-3-Clause License, which is suitable for both academic and industrial/commercial purposes.
This code has been created for research purposes at the [Chair of Systems Theory and Control Engineering](https://www.uni-saarland.de/en/chair/rudolph.html) of Saarland University, Germany.
We apply algebraic differentiators to solve different problems related to control theory and signal processing: parameter estimation, feedback control, fault detection and fault-tolerant control, model-free control, ...
Table of Contents
=================
* [Motivation](#motivation)
* [On algebraic differentiators](#on-algebraic-differentiators)
* [GUI](#gui)
* [Prerequisites for the implementation](#prerequisites-for-the-implementation)
* [Installation](#installation)
* [How to use the implementation](#how-to-use-the-implementation)
* [Troubleshooting](#troubleshooting)
* [Documentation](#documentation)
* [Questions & Contact](#questions--contact)
* [License](#license)
* [References](#references)
# Motivation
Estimating the derivatives of noisy signals is of paramount importance in many
fields of engineering and applied mathematics. It is, however, a longstanding ill-posed
and challenging problem, in the sense that a small error in measurement data can
induce a significant error in the estimated derivatives.
Figure 1 shows the results of the numerical estimation of the first time derivative of a noisy signal based on an algebraic differentiator on the one hand and the simple difference quotient rule on the other. This simulation shows the excellent performance of this numerical differentiation approach.
|  |
|:--:|
| Figure 1. Numerical differentiation of a noisy signal using a simple difference quotient on the one hand and an algebraic differentiator on the other |
# On algebraic differentiators
Algebraic differentiators have been derived and discussed in the systems and control theory community. The initial works based on differential-algebraic methods have been developed by Mboup, Join, and Fliess in [[2]](#2). These numerical, non-asymptotic approximation approaches
for higher-order derivatives of noisy signals are well suited for real-time embedded systems. A historical overview and a detailed discussion of these differentiators and their time-domain and frequency-domain properties are given in the survey [[1]](#1).
The approximation-theoretic derivation recalled in the survey [[1]](#1) permits the interpretation of the estimation process by the following three steps illustrated in the figure below stemming from [[1]](#1):
1. Projection: At time <img src="https://render.githubusercontent.com/render/math?math=t">, the sought <img src="https://render.githubusercontent.com/render/math?math=n">-th order time derivative <img src="https://render.githubusercontent.com/render/math?math=y^{(n)}"> over the interval <img src="https://render.githubusercontent.com/render/math?math=I_{T}(t)"> is projected onto the space of polynomials of degree <img src="https://render.githubusercontent.com/render/math?math=\mathrm{N}">. This yields the polynomial <img src="https://render.githubusercontent.com/render/math?math=p_\mathrm{N}"> depicted in the left and middle part of Figure 2.
2. Evaluation: The polynomial <img src="https://render.githubusercontent.com/render/math?math=p_\mathrm{N}"> is evaluated at <img src="https://render.githubusercontent.com/render/math?math=t-\delta_t">, which gives an estimate <img src="https://render.githubusercontent.com/render/math?math={\hat{y}^{(n)}(t)=p_{\mathrm{N}}(t-\delta_t)}"> for the derivative <img src="https://render.githubusercontent.com/render/math?math=y^{(n)}(t)"> as depicted in the central part of Figure 2. Choosing the delay to be the largest root of a special Jacobi polynomial increases the approximation order by 1 with a minimal delay. Alternatively, a delay-free estimation or even a prediction of the future derivative might be selected, at the cost of a reduced accuracy.
3. Repetition: The first two steps are repeated at each discrete time instant <img src="https://render.githubusercontent.com/render/math?math=t_i"> while keeping the parameters of the differentiator constant. This yields the estimate <img src="https://render.githubusercontent.com/render/math?math=\hat{y}^{(n)}"> depicted in the right part of Figure 2.
|  |
|:--:|
| Figure 2. Three-step process of the estimation of the <img src="https://render.githubusercontent.com/render/math?math=n">-th order derivative <img src="https://render.githubusercontent.com/render/math?math={y^{(n)}:t\mapsto y^{(n)}(t)}"> of a signal <img src="https://render.githubusercontent.com/render/math?math=y:t\mapsto y(t)"> using algebraic differentiators (figure from [[1]](#1)) |
Algebraic differentiators can be interpreted as linear time-invariant filters with a finite-duration impulse response. Figure 3 visualizes the online estimation process of the first derivative of a noisy signal. The filter window, the buffered signal, and the filter kernel can be clearly seen.
| <img src="https://github.com/aothmane-control/Algebraic-differentiators/blob/master/data/animationEstimation.gif" height="500">|
|:--:|
| Figure 3. Visualization of the online estimation of the first derivative of a noisy signal using an algebraic differentiator.|
These filters can be approximated as lowpass filters with a known cutoff frequency and a stopband slope. Figure 4 presents the amplitude and phase spectra of two exemplary filters. The lowpass approximation is also shown.
|  |
|:--:|
| Figure 4. Amplitude and phase spectra of two different filters and the corresponding lowpass approximation of the amplitude spectrum |
See [[1]](#1), [[3]](#3), [[4]](#4), and [[5]](#5) for more details on the parametrization of these differentiators.
# GUI
Since version 1.1, a GUI is available. Executable files for Linux and Windows operating systems are provided and do not require the installation of additional software. The binaries for the different operating systems can be downloaded from the latest release [page](https://github.com/aothmane-control/Algebraic-differentiators/releases).
Neither Python nor Matlab has to be installed to start designing algebraic differentiators, get discrete filter coefficients, and estimate derivatives. This GUI can be used to plot relevant data (impulse and step responses, amplitude and phase spectra, estimated derivatives, ...), display relevant properties of the differentiators (estimation delay, cutoff frequency, window length, discretization effects, ...), and load measured signals for the estimation of their derivatives without a single line of code. Relevant properties, signals, spectra, and discrete filter coefficients can be exported for further processing. For testing the import of measurement data, a [file](https://github.com/aothmane-control/Algebraic-differentiators/blob/master/examples/QuickStart.ipynb) has been provided in the folder DataForGUI.
|  |
|:--:|
| Figure 5. GUI for the interactive design, analysis, and use of algebraic differentiators |
# Prerequisites for the implementation
The code is implemented in Python 3. The packages required to use all functionalities are listed in the requirements.txt file. The examples implemented in Python are written in [jupyter notebooks](https://jupyter.org/) and require the packages [jupyter_latex_envs](https://github.com/jfbercher/jupyter_latex_envs) for the generation of useful documentations and [matplotlib](https://matplotlib.org/) for the creation of plots. The functions in the toolbox can also be used in Matlab, for which different examples are also included. Check the Matlab [documentation](https://de.mathworks.com/help/matlab/matlab_external/install-supported-python-implementation.html) for more details on the compatibility of your Matlab version with Python.
# Installation
To use AlgDiff using pip run the following in the command line:
```
$ pip install AlgDiff
```
# How to use the implementation
The contribution of this implementation is an easy-to-use framework for the design and discretization of algebraic differentiators to achieve desired filter characteristics, i.e., a specified cutoff frequency and stopband slope. The file [algebraicDifferentiator.py](https://github.com/aothmane-control/Algebraic-differentiators/blob/master/algebraicDifferentiator.py) implements the class AlgebraicDifferentiator. This class contains all necessary functions for the design, analysis, and discretization of the differentiators.
Different examples are provided as jupyter notebooks and Matlab code in the following:
* A quick start in a jupyter [notebook](https://github.com/aothmane-control/Algebraic-differentiators/blob/master/examples/QuickStart.ipynb) available also as an [HTML file](https://htmlpreview.github.io/?https://github.com/aothmane-control/Algebraic-differentiators/blob/master/examples/QuickStart.html)
* A detailed jupyter [notebook](https://github.com/aothmane-control/Algebraic-differentiators/blob/master/examples/DetailedExamples.ipynb) available also as an [HTML file](https://htmlpreview.github.io/?https://github.com/aothmane-control/Algebraic-differentiators/blob/master/examples/DetailedExamples.html)
* The simultaneous elimination of a harmonic disturbance and approximation of derivatives is demonstrated in the jupyter [notebook](https://github.com/aothmane-control/Algebraic-differentiators/blob/master/examples/EliminationDisturbancesExample.ipynb) available also as an [HTML file](https://htmlpreview.github.io/?https://github.com/aothmane-control/Algebraic-differentiators/blob/master/examples/EliminationDisturbancesExample.html)
* A quick start in [Matlab](https://github.com/aothmane-control/Algebraic-differentiators/blob/master/examples/QuickStart.mlx)
* A [Matlab](https://github.com/aothmane-control/Algebraic-differentiators/blob/master/examples/DetailedExamples.mlx) code with several examples
# Troubleshooting
A list of known issues and fixes is also [provided](https://algebraic-differentiators.readthedocs.io/en/latest/troubleshooting.html).
# Documentation
A detailed [documentation](https://algebraic-differentiators.readthedocs.io/en/latest/documentation.html) for all the functions is also provided in the webpage of the [project](https://algebraic-differentiators.readthedocs.io/en/latest/index.html).
# Questions & Contact
Feel free to contact [Amine](https://www.uni-saarland.de/en/chair/rudolph/staff/aothmane.html) in case of suggestions or questions.
# License
BSD 3-Clause "New" or "Revised" License, see [License-file](https://github.com/aothmane-control/Algebraic-differentiators/blob/master/LICENSE).
# References
<a id="5">[1]</a> A. Othmane, L. Kiltz, and J. Rudolph, "Survey on algebraic numerical differentiation: historical developments, parametrization, examples, and applications", Int. J. Syst. Sci. https://doi.org/10.1080/00207721.2022.2025948
<a id="1">[2]</a> M. Mboup, C. Join, and M. Fliess, "Numerical differentiation with annihilators in noisy environment", Numerical Algorithms, 50 (4), 439–467, 2009, https://doi.org/10.1007/s11075-008-9236-1
<a id="2">[3]</a> L. Kiltz and J. Rudolph, “Parametrization of algebraic numerical
differentiators to achieve desired filter characteristics,” in Proc. 52nd
IEEE Conf. on Decision and Control, Firenze, Italy, 2013, pp. 7010–
7015, https://doi.org/10.1109/CDC.2013.6761000
<a id="3">[4]</a> M. Mboup and S. Riachy, "Frequency-domain analysis and tuning of the algebraic differentiators," Int. J. Control , 91 (9), 2073–2081, 2018, https://doi.org/10.1080/00207179.2017.1421776
<a id="4">[5]</a> A. Othmane, J. Rudolph, and H. Mounier, "Systematic comparison of numerical differentiators and an application to model-free control", Eur. J. Control. https://doi.org/10.1016/j.ejcon.2021.06.020
<a id="6">[6]</a> A. Othmane and J. Rudolph "AlgDiff: an open source toolbox for the design, analysis and discretisation of algebraic differentiators", at - Automatisierungstechnik https://doi.org/10.1515/auto-2023-0035 https://arxiv.org/abs/2308.04888
|
AlgDiff
|
/AlgDiff-2.2.1.tar.gz/AlgDiff-2.2.1/README.md
|
README.md
|
from math import sqrt  # real vectors have non-negative squared norms, so math.sqrt suffices
from random import randint
# --------------------- #
'''Create vectors'''
# null
# unit
# random
# opposite
'''Print vectors'''
'''Vector equality'''
'''Vector arithmetic'''
# subtraction
# addition
# dot product
# multiplication by a scalar
# division by a scalar
# magnitude
# --------------------- #
class Vetor:
    # methods
    # the only parameter is the dimension: R², R³ or higher
    # with no argument, returns a vector in R²
def criar_vetor_nulo(self, dimensao=2):
vector = [0] * dimensao
return vector
def criar_vetor_unitario(self, dimensao=2):
vector = [1] * dimensao
return vector
    # with no argument, returns a vector in R² filled with random values between 0 and 9
def criar_vetor_aleatorio(self, dimensao=2):
vector = []
for i in range(dimensao):
vector.append(randint(0, 9))
return vector
    # for the opposite vector, pass the vector whose opposite you want as a parameter
def criar_vetor_oposto(self, vector):
vetor_oposto = []
for i in range(len(vector)):
vetor_oposto.append(-vector[i])
return vetor_oposto
    # prints vectors the way we write them on paper
    # accepts several lists of vectors (one list per argument) if plural is
    # True, or several plain vectors if plural is False; plural is now
    # keyword-only so that calls like imprimir_vetor(v) print the vector
    def imprimir_vetor(self, *args, plural=False):
        if plural:
            for linha_de_linha in range(len(args)):
                for linha in range(len(args[linha_de_linha])):
                    print('(', end='')
                    for coluna in range(len(args[linha_de_linha][linha])):
                        if coluna == (len(args[linha_de_linha][linha]) - 1):
                            print(str(args[linha_de_linha][linha][coluna]) + ')', end='\n')
                        else:
                            print(str(args[linha_de_linha][linha][coluna]) + ',', end=' ')
        else:
            for linha in range(len(args)):
                print('(', end='')
                for coluna in range(len(args[linha])):
                    if coluna == (len(args[linha]) - 1):
                        print(str(args[linha][coluna]) + ')', end='\n')
                    else:
                        print(str(args[linha][coluna]) + ',', end=' ')
    # accepts more than two vectors but only compares the first two
    def igualdade_de_vetores(self, *args):
        tamanho = 0
        if len(args[0]) == len(args[1]):
            for item in range(len(args[0])):
                if args[0][item] == args[1][item]:
                    tamanho += 1
                else:
                    break
            if tamanho == len(args[1]):
                print("Vectors are equal", end='\n')
            else:
                print("Vectors are not equal", end='\n')
        else:
            print("Vectors have different dimensions, so they are not equal", end='\n')
    # subtraction of vectors of the same size; does not accept lists of vectors
    def subtracao_de_vetor(self, *args):
        for i in range(len(args) - 1):
            if len(args[i]) != len(args[i + 1]):
                print("Cannot subtract vectors of different sizes", end='\n')
                return 0
        vetor_subtracao = []
        for linha in range(len(args[0])):  # one component per position
            elemento = args[0][linha]
            for coluna in range(1, len(args)):  # subtract the remaining vectors
                elemento -= args[coluna][linha]
            vetor_subtracao.append(elemento)
        return vetor_subtracao
    # addition of vectors of the same size; does not accept lists of vectors
    def soma_de_vetor(self, *args):
        for i in range(len(args) - 1):
            if len(args[i]) != len(args[i + 1]):
                print("Cannot add vectors of different sizes", end='\n')
                return 0
        vetor_soma = []
        for linha in range(len(args[0])):
            elemento = args[0][linha]
            for coluna in range(1, len(args)):
                elemento += args[coluna][linha]
            vetor_soma.append(elemento)
        return vetor_soma
    # multiplies the entries position by position and sums the products: x*x + y*y + z*z...
    def produto_escalar_vetor(self, *args):
        for i in range(len(args) - 1):
            if len(args[i]) != len(args[i + 1]):
                print("Cannot compute the dot product of vectors of different sizes", end='\n')
                return 0
        produto_escalar = 0
        for items in range(len(args[0])):
            multiplicacao = args[0][items]
            for elementos in range(1, len(args)):
                multiplicacao *= args[elementos][items]
            produto_escalar += multiplicacao
        return produto_escalar
    # instead of returning, modifies the values of the vectors passed as parameters
    def multiplicacao_por_escalar_vetor(self, *args, escalar):
        for items in range(len(args)):
            for elementos in range(len(args[items])):
                args[items][elementos] *= escalar
    # instead of returning, modifies the values of the vectors passed as parameters
    def divisao_por_escalar_vetor(self, *args, escalar):
        for items in range(len(args)):
            for elementos in range(len(args[items])):
                args[items][elementos] /= escalar
    # returns a list with the magnitude of each vector passed as a
    # parameter, at the corresponding index
    def modulo_vetor(self, *args):
        modulo = []
        for items in range(len(args)):
            quadrado = 0
            for elementos in range(len(args[items])):
                quadrado += args[items][elementos] ** 2
            modulo.append(sqrt(quadrado))
        return modulo
    '''def angulo_entre_vetor(self, vetor1, vetor2, mostrar=False, returner=True):
        # draft: angle between two vectors; requires "import math" and
        # indexing into the list returned by modulo_vetor
        if len(vetor1) == len(vetor2):
            cima = self.produto_escalar_vetor(vetor1, vetor2)
            baixo = self.modulo_vetor(vetor1)[0] * self.modulo_vetor(vetor2)[0]
            resultado = cima / baixo
            if mostrar:
                print(f'{math.degrees(math.acos(resultado))} degrees')
            if returner:
                return math.degrees(math.acos(resultado))
        else:
            print("Cannot compute the angle between vectors of different dimensions", end='\n')'''
# class Matriz:
# ----- TESTS ----- #
ve = Vetor()
vetor1 = ve.criar_vetor_unitario(4)
ve.imprimir_vetor(vetor1)
vetor2 = ve.criar_vetor_oposto(vetor1)
ve.imprimir_vetor(vetor2)
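# Additional sketch (not in the original tests): elementwise sum and dot
# product of two random vectors, using the methods defined above.
vetor3 = ve.criar_vetor_aleatorio(3)
vetor4 = ve.criar_vetor_aleatorio(3)
ve.imprimir_vetor(ve.soma_de_vetor(vetor3, vetor4))
print(ve.produto_escalar_vetor(vetor3, vetor4))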
|
Algebra-Linear
|
/Algebra_Linear-0.0.1-py3-none-any.whl/Algebra_Linear/Vetor.py
|
Vetor.py
|
from random import randint  # only randint is actually used below
# Matrix operations #
'''Create matrices'''
# null
# unit
# random
# identity
'''Print'''
# print
'''Equality'''
# equality
'''Matrix calculations'''
# addition
# subtraction
# matrix product
# multiplication by a scalar
# power
# trace
# determinant
# transpose
# symmetric
# ----------------------- #
def cria_matriz_aleatoria(linha, coluna):
matriz = []
for x in range(linha):
partes = []
for y in range(coluna):
partes.append(randint(0, 9))
matriz.append(partes)
return matriz
def cria_matriz_nula(linha, coluna):
matriz = []
for x in range(linha):
partes = []
for y in range(coluna):
partes.append(0)
matriz.append(partes)
return matriz
def cria_matriz_unitaria(linha, coluna):
matriz = []
for x in range(linha):
partes = []
for y in range(coluna):
partes.append(1)
matriz.append(partes)
return matriz
def cria_matriz_identidade(tamanho):
matriz = []
for x in range(tamanho):
partes = []
for y in range(tamanho):
if x == y:
partes.append(1)
else:
partes.append(0)
matriz.append(partes)
return matriz
def imprime_matriz(vetor):
for x in range(len(vetor)):
print('|', end='')
for y in range(len(vetor[0])):
print(vetor[x][y], end=" ")
print('|', end='')
print()
print()
def igualdade_matriz(matriz1, matriz2):
    tamanho = 0
    items = len(matriz1) * len(matriz1[0])
    if (len(matriz1) == len(matriz2)) and (len(matriz1[0]) == len(matriz2[0])):
        for l in range(len(matriz1)):
            for c in range(len(matriz1[0])):
                if matriz1[l][c] == matriz2[l][c]:
                    tamanho += 1
                else:
                    break
        if tamanho == items:
            print("Matrices are equal", end='\n')
        else:
            print("Matrices are not equal", end='\n')
    else:
        print("Matrices of different dimensions", end='\n')
def soma_matriz(a, b):
if (len(a) == len(b) and len(a[0]) == len(b[0])):
tamanho_linha = len(a)
tamanho_coluna = len(a[0])
matriz = cria_matriz_nula(tamanho_linha, tamanho_coluna)
for x in range(tamanho_linha):
for y in range(tamanho_coluna):
matriz[x][y] = a[x][y] + b[x][y]
return matriz
else:
print("Não há como calcular matrizes de dimensões diferentes", end='\n')
def subtracao_matriz(a, b):
if (len(a) == len(b) and len(a[0]) == len(b[0])):
tamanho_linha = len(a)
tamanho_coluna = len(a[0])
matriz = cria_matriz_nula(tamanho_linha, tamanho_coluna)
for x in range(tamanho_linha):
for y in range(tamanho_coluna):
matriz[x][y] = a[x][y] - b[x][y]
return matriz
else:
print("Não há como calcular matrizes de dimensões diferentes", end='\n')
def multiplicacao_matriz_por_escalar(matriz, escalar):
tamanho_linha = len(matriz)
tamanho_coluna = len(matriz[0])
for x in range(tamanho_linha):
for y in range(tamanho_coluna):
matriz[x][y] = matriz[x][y] * escalar
return matriz
# despite its name, this computes the standard matrix product
def produto_escalar_matriz(matriz1, matriz2):
tamanho_coluna = len(matriz1[0])
tamanho_linha = len(matriz2)
if (tamanho_coluna == tamanho_linha):
matriz3 = cria_matriz_nula(len(matriz1), len(matriz2[0]))
for li in range(len(matriz1)):
for c in range(len(matriz2[0])):
for seg in range(tamanho_coluna):
matriz3[li][c] += matriz1[li][seg] * matriz2[seg][c]
return matriz3
else:
print("Não é possível", end='\n')
def potencia_matriz(matriz, expoente):
tamanho_linha = len(matriz)
tamanho_coluna = len(matriz[0])
resultado = matriz
if (tamanho_linha != tamanho_coluna):
print("Só há como calular matriz quadrada", end='\n')
elif expoente == 0:
return cria_matriz_identidade(tamanho_linha)
elif (expoente == 1):
return matriz
elif (expoente >= 2):
for i in range(1, expoente):
resultado = produto_escalar_matriz(resultado, matriz)
return resultado
else:
print("Não há como calcular matriz de expoente negativo")
def traco_matriz(matriz):
traco = 0
tamanho_linha = len(matriz)
tamanho_coluna = len(matriz[0])
for l in range(tamanho_linha):
for c in range(tamanho_coluna):
if l == c:
traco += matriz[l][c]
return traco
'''def determinante(matriz):
final = len(matriz[0]) - 1
diagonal_principal = 1
diagonal_secundaria = 1
for l in range(len(matriz)):
for c in range(len(matriz[0])):
if l == c:
diagonal_principal *= matriz[l][c]
diagonal_secundaria *= matriz[l][final]
final -= 1
return diagonal_principal - diagonal_secundaria'''
def transposta(matriz):
linha = len(matriz[0])
coluna = len(matriz)
matriz_transposta = cria_matriz_nula(linha, coluna)
for lin in range(linha):
for col in range(coluna):
matriz_transposta[lin][col] = matriz[col][lin]
return matriz_transposta
# checks whether matriz2 is the transpose of matriz1
def matriz_simetrica(matriz1, matriz2):
    saida = False
    for linha in range(len(matriz1)):
        for coluna in range(len(matriz1[0])):
            if matriz1[linha][coluna] == matriz2[coluna][linha]:
                continue
            else:
                saida = True
                print("They are not symmetric", end="\n")
                break
        if saida:
            break
    if not saida:
        print("They are symmetric", end="\n")
|
Algebra-Linear
|
/Algebra_Linear-0.0.1-py3-none-any.whl/Algebra_Linear/Matriz.py
|
Matriz.py
|
def algebra_with_sympy_preparser(lines):
"""
In IPython compatible environments (Jupyter, IPython, etc...) this supports
a special compact input method for equations.
The syntax supported is `equation_name =@ equation.lhs = equation.rhs`,
where `equation_name` is a valid Python name that can be used to refer to
the equation later. `equation.lhs` is the left-hand side of the equation
and `equation.rhs` is the right-hand side of the equation. Each side of the
equation must parse into a valid Sympy expression.
**Note**: This does not support line continuation. Long equations should be
built by combining expressions using names short enough to do this on one
line. The alternative is to use `equation_name = Eqn(long ...
expressions ... with ... multiple ... lines)`.
**Note**: If the `equation_name` is omitted the equation will be formed,
but it will not be assigned to a name that can be used to refer to it
later. You may be able to access it through one of the special IPython
underscore names. This is not recommended.
**THIS FUNCTION IS USED BY THE IPYTHON ENVIRONMENT TO PREPARSE THE INPUT
BEFORE IT IS PASSED TO THE PYTHON INTERPRETER. IT IS NOT MEANT TO BE USED
DIRECTLY BY A USER**
"""
new_lines = []
if isinstance(lines,str):
lines = [lines]
for k in lines:
if '=@' in k:
linesplit = k.split('=@')
eqsplit = linesplit[1].split('=')
if len(eqsplit)!=2:
raise ValueError('The two sides of the equation must be' \
' separated by an \"=\" sign when using' \
' the \"=@\" special input method.')
templine =''
if eqsplit[0]!='' and eqsplit[1]!='':
if eqsplit[1].endswith('\n'):
eqsplit[1] = eqsplit[1][:-1]
if linesplit[0]!='':
templine = str(linesplit[0])+'= Eqn('+str(eqsplit[0])+',' \
''+str(eqsplit[1])+')\n'
else:
templine = 'Eqn('+str(eqsplit[0])+','+str(eqsplit[1])+')\n'
new_lines.append(templine)
else:
new_lines.append(k)
return new_lines
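# Transformation sketch: the preparser rewrites the compact "=@" syntax
# into an ordinary `Eqn(...)` call (surrounding whitespace is preserved):
#   algebra_with_sympy_preparser(['eq1 =@ a = b/c\n'])
#   -> ['eq1 = Eqn( a , b/c)\n']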
def integers_as_exact(lines):
"""This preparser uses `sympy.interactive.session.int_to_Integer` to
convert numbers without decimal points into sympy integers so that math
on them will be exact rather than defaulting to floating point. **This
should not be called directly by the user. It is plugged into the
IPython preparsing sequence when the feature is requested.** The default for
Algebra_with_sympy is to use this preparser. This can be turned on and
off using the Algebra_with_sympy functions:
* `set_integers_as_exact()`
* `unset_integers_as_exact()`
"""
from sympy.interactive.session import int_to_Integer
string = ''
for k in lines:
string += k + '\n'
string = string[:-1] # remove the last '\n'
return int_to_Integer(string)
from IPython import get_ipython
if get_ipython():
if hasattr(get_ipython(),'input_transformers_cleanup'):
get_ipython().input_transformers_post.\
append(algebra_with_sympy_preparser)
else:
import warnings
warnings.warn('Compact equation input unavailable.\nYou will have ' \
'to use the form "eq1 = Eqn(lhs,rhs)" instead of ' \
'"eq1=@lhs=rhs".\nIt appears you are running an ' \
'outdated version of IPython.\nTo fix, update IPython ' \
'using "pip install -U IPython".')
|
Algebra-with-SymPy
|
/Algebra_with_SymPy-0.12.0.tar.gz/Algebra_with_SymPy-0.12.0/algebra_with_sympy/preparser.py
|
preparser.py
|
import sys
import sympy
from sympy.core.add import _unevaluated_Add
from sympy.core.expr import Expr
from sympy.core.basic import Basic
from sympy.core.evalf import EvalfMixin
from sympy.core.sympify import _sympify
from algebra_with_sympy.preparser import integers_as_exact
import functools
from sympy import *
class algwsym_config():
def __init__(self):
"""
This is a class to hold parameters that control behavior of
the algebra_with_sympy package.
Settings
========
Printing
--------
In interactive environments the default output of an equation is a
human readable string with the two sides connected by an equals
sign or a typeset equation with the two sides connected by an equals sign.
`print(Eqn)` or `str(Eqn)` will return this human readable text version of
the equation as well. This is consistent with python standards, but not
sympy, where `str()` is supposed to return something that can be
copy-pasted into code. If the equation has a declared name as in `eq1 =
Eqn(a,b/c)` the name will be displayed to the right of the equation in
parentheses (eg. `a = b/c (eq1)`). Use `print(repr(Eqn))` instead of
`print(Eqn)` or `repr(Eqn)` instead of `str(Eqn)` to get a code
compatible version of the equation.
        You can adjust this behavior using some flags that impact output:
* `algwsym_config.output.show_code` default is `False`.
* `algwsym_config.output.human_text` default is `True`.
* `algwsym_config.output.label` default is `True`.
In interactive environments you can get both types of output by setting
the `algwsym_config.output.show_code` flag. If this flag is true
calls to `latex` and `str` will also print an additional line "code
version: `repr(Eqn)`". Thus in Jupyter you will get a line of typeset
mathematics output preceded by the code version that can be copy-pasted.
Default is `False`.
A second flag `algwsym_config.output.human_text` is useful in
text-based interactive environments such as command line python or
ipython. If this flag is true `repr` will return `str`. Thus the human
readable text will be printed as the output of a line that is an
expression containing an equation.
Default is `True`.
Setting both of these flags to true in a command line or ipython
environment will show both the code version and the human readable text.
These flags impact the behavior of the `print(Eqn)` statement.
The third flag `algwsym_config.output.label` has a default value of
`True`. Setting this to `False` suppresses the labeling of an equation
with its python name off to the right of the equation.
"""
pass
class output():
def __init__(self):
"""This holds settings that impact output.
"""
pass
@property
def show_code(self):
"""
If `True` code versions of the equation expression will be
output in interactive environments. Default = `False`.
"""
return self.show_code
@property
def human_text(self):
"""
If `True` the human readable equation expression will be
output in text interactive environments. Default = `True`.
"""
return self.human_text
@property
def solve_to_list(self):
"""
If `True` the results of a call to `solve(...)` will return a
Python `list` rather than a Sympy `FiniteSet`. This recovers
behavior for versions before 0.11.0.
Note: setting this `True` means that expressions within the
returned solutions will not be pretty-printed in Jupyter and
IPython.
"""
return self.solve_to_list
class numerics():
def __init__(self):
"""This class holds settings for how numerical computation and
inputs are handled.
"""
pass
def integers_as_exact(self):
"""**This is a flag for informational purposes and interface
consistency. Changing the value will not change the behavior.**
To change the behavior call:
* `unset_integers_as_exact()` to turn this feature off.
* `set_integers_as_exact()` to turn this feature on (on by
default).
If set to `True` (the default) and if running in an
IPython/Jupyter environment any number input without a decimal
will be interpreted as a sympy integer. Thus, fractions and
related expressions will not evaluate to floating point numbers,
but be maintained as exact expressions (e.g. 2/3 -> 2/3 not the
float 0.6666...).
"""
return self.integers_as_exact
def __latex_override__(expr, *arg):
from IPython import get_ipython
show_code = False
if get_ipython():
algwsym_config = get_ipython().user_ns.get("algwsym_config", False)
else:
algwsym_config = globals()['algwsym_config']
if algwsym_config:
show_code = algwsym_config.output.show_code
if show_code:
print("Code version: " + repr(expr))
return '$'+latex(expr) + '$'
def __command_line_printing__(expr, *arg):
# print('Entering __command_line_printing__')
human_text = True
show_code = False
if algwsym_config:
human_text = algwsym_config.output.human_text
show_code = algwsym_config.output.show_code
tempstr = ''
if show_code:
tempstr += "Code version: " + repr(expr) + '\n'
if not human_text:
return print(tempstr + repr(expr))
else:
return print(tempstr + str(expr))
# Now we inject the formatting override(s)
from IPython import get_ipython
ip = get_ipython()
formatter = None
if ip:
# In an environment that can display typeset latex
formatter = ip.display_formatter
old = formatter.formatters['text/latex'].for_type(Basic,
__latex_override__)
# print("For type Basic overriding latex formatter = " + str(old))
# For the terminal based IPython
if "text/latex" not in formatter.active_types:
old = formatter.formatters['text/plain'].for_type(tuple,
__command_line_printing__)
# print("For type tuple overriding plain text formatter = " + str(old))
for k in sympy.__all__:
if k in globals() and "Printer" not in k:
if isinstance(globals()[k], type):
old = formatter.formatters['text/plain'].\
for_type(globals()[k], __command_line_printing__)
# print("For type "+str(k)+
# " overriding plain text formatter = " + str(old))
else:
# command line
# print("Overriding command line printing of python.")
sys.displayhook = __command_line_printing__
# Numerics controls
def set_integers_as_exact():
"""This operation uses `sympy.interactive.session.int_to_Integer`, which
causes any number input without a decimal to be interpreted as a sympy
integer, to pre-parse input cells. It also sets the flag
`algwsym_config.numerics.integers_as_exact = True` This is the default
mode of algebra_with_sympy. To turn this off call
`unset_integers_as_exact()`.
"""
from IPython import get_ipython
if get_ipython():
get_ipython().input_transformers_post.append(integers_as_exact)
algwsym_config = get_ipython().user_ns.get("algwsym_config", False)
if algwsym_config:
algwsym_config.numerics.integers_as_exact = True
else:
raise ValueError("The algwsym_config object does not exist.")
return
def unset_integers_as_exact():
"""This operation disables forcing of numbers input without
decimals being interpreted as sympy integers. Numbers input without a
decimal may be interpreted as floating point if they are part of an
expression that undergoes python evaluation (e.g. 2/3 -> 0.6666...). It
also sets the flag `algwsym_config.numerics.integers_as_exact = False`.
Call `set_integers_as_exact()` to avoid this conversion of rational
fractions and related expressions to floating point. Algebra_with_sympy
starts with `set_integers_as_exact()` enabled (
`algwsym_config.numerics.integers_as_exact = True`).
"""
from IPython import get_ipython
if get_ipython():
pre = get_ipython().input_transformers_post
# The below looks excessively complicated, but more reliably finds the
# transformer to remove across varying IPython environments.
for k in pre:
if "integers_as_exact" in k.__name__:
pre.remove(k)
algwsym_config = get_ipython().user_ns.get("algwsym_config", False)
if algwsym_config:
algwsym_config.numerics.integers_as_exact = False
else:
raise ValueError("The algwsym_config object does not exist.")
return
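# Usage sketch, assuming an IPython session with algebra_with_sympy loaded
# (outputs shown are illustrative):
#
#   In [1]: 2/3                        # exact by default
#   Out[1]: 2/3
#   In [2]: unset_integers_as_exact()
#   In [3]: 2/3                        # plain python float division again
#   Out[3]: 0.6666666666666666
#   In [4]: set_integers_as_exact()    # restore the default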
class Equation(Basic, EvalfMixin):
"""
This class defines an equation with a left-hand-side (lhs) and a right-
hand-side (rhs) connected by the "=" operator (e.g. `p*V = n*R*T`).
Explanation
===========
This class defines relations that all high school and college students
would recognize as mathematical equations. At present only the "=" relation
operator is recognized.
This class is intended to allow using the mathematical tools in SymPy to
rearrange equations and perform algebra in a stepwise fashion. In this
way more people can successfully perform algebraic rearrangements without
stumbling over missed details such as a negative sign.
Create an equation with the call ``Equation(lhs,rhs)``, where ``lhs`` and
``rhs`` are any valid Sympy expression. ``Eqn(...)`` is a synonym for
``Equation(...)``.
Parameters
==========
lhs: sympy expression, ``class Expr``.
rhs: sympy expression, ``class Expr``.
kwargs:
Examples
========
NOTE: All the examples below are in vanilla python. You can get human
readable equations "lhs = rhs" in vanilla python by adjusting the settings
in `algwsym_config` (see its documentation). Output is human readable by
default in IPython and Jupyter environments.
>>> from algebra_with_sympy import *
>>> a, b, c, x = var('a b c x')
>>> Equation(a,b/c)
Equation(a, b/c)
>>> t=Eqn(a,b/c)
>>> t
Equation(a, b/c)
>>> t*c
Equation(a*c, b)
>>> c*t
Equation(a*c, b)
>>> exp(t)
Equation(exp(a), exp(b/c))
>>> exp(log(t))
Equation(a, b/c)
Simplification and Expansion
>>> f = Eqn(x**2 - 1, c)
>>> f
Equation(x**2 - 1, c)
>>> f/(x+1)
Equation((x**2 - 1)/(x + 1), c/(x + 1))
>>> (f/(x+1)).simplify()
Equation(x - 1, c/(x + 1))
>>> simplify(f/(x+1))
Equation(x - 1, c/(x + 1))
>>> (f/(x+1)).expand()
Equation(x**2/(x + 1) - 1/(x + 1), c/(x + 1))
>>> expand(f/(x+1))
Equation(x**2/(x + 1) - 1/(x + 1), c/(x + 1))
>>> factor(f)
Equation((x - 1)*(x + 1), c)
>>> f.factor()
Equation((x - 1)*(x + 1), c)
>>> f2 = f+a*x**2+b*x +c
>>> f2
Equation(a*x**2 + b*x + c + x**2 - 1, a*x**2 + b*x + 2*c)
>>> collect(f2,x)
Equation(b*x + c + x**2*(a + 1) - 1, a*x**2 + b*x + 2*c)
Apply operation to only one side
>>> poly = Eqn(a*x**2 + b*x + c*x**2, a*x**3 + b*x**3 + c*x)
>>> poly.applyrhs(factor,x)
Equation(a*x**2 + b*x + c*x**2, x*(c + x**2*(a + b)))
>>> poly.applylhs(factor)
Equation(x*(a*x + b + c*x), a*x**3 + b*x**3 + c*x)
>>> poly.applylhs(collect,x)
Equation(b*x + x**2*(a + c), a*x**3 + b*x**3 + c*x)
``.apply...`` also works with user defined python functions
>>> def addsquare(eqn):
... return eqn+eqn**2
...
>>> t.apply(addsquare)
Equation(a**2 + a, b**2/c**2 + b/c)
>>> t.applyrhs(addsquare)
Equation(a, b**2/c**2 + b/c)
>>> t.apply(addsquare, side = 'rhs')
Equation(a, b**2/c**2 + b/c)
>>> t.applylhs(addsquare)
Equation(a**2 + a, b/c)
>>> addsquare(t)
Equation(a**2 + a, b**2/c**2 + b/c)
In addition to ``.apply...`` there are also the less general ``.do``,
``.dolhs`` and ``.dorhs``, which only work for operations defined on the
``Expr`` class (e.g. ``.collect(), .factor(), .expand()``, etc...).
>>> poly.dolhs.collect(x)
Equation(b*x + x**2*(a + c), a*x**3 + b*x**3 + c*x)
>>> poly.dorhs.collect(x)
Equation(a*x**2 + b*x + c*x**2, c*x + x**3*(a + b))
>>> poly.do.collect(x)
Equation(b*x + x**2*(a + c), c*x + x**3*(a + b))
>>> poly.dorhs.factor()
Equation(a*x**2 + b*x + c*x**2, x*(a*x**2 + b*x**2 + c))
``poly.do.exp()`` or other sympy math functions will raise an error.
Rearranging an equation (simple example made complicated as illustration)
>>> p, V, n, R, T = var('p V n R T')
>>> eq1=Eqn(p*V,n*R*T)
>>> eq1
Equation(V*p, R*T*n)
>>> eq2 =eq1/V
>>> eq2
Equation(p, R*T*n/V)
>>> eq3 = eq2/R/T
>>> eq3
Equation(p/(R*T), n/V)
>>> eq4 = eq3*R/p
>>> eq4
Equation(1/T, R*n/(V*p))
>>> 1/eq4
Equation(T, V*p/(R*n))
>>> eq5 = 1/eq4 - T
>>> eq5
Equation(0, -T + V*p/(R*n))
Substitution (#'s and units)
>>> L, atm, mol, K = var('L atm mol K', positive=True, real=True) # units
>>> eq2.subs({R:0.08206*L*atm/mol/K,T:273*K,n:1.00*mol,V:24.0*L})
Equation(p, 0.9334325*atm)
>>> eq2.subs({R:0.08206*L*atm/mol/K,T:273*K,n:1.00*mol,V:24.0*L}).evalf(4)
Equation(p, 0.9334*atm)
Substituting an equation into another equation:
>>> P, P1, P2, A1, A2, E1, E2 = symbols("P, P1, P2, A1, A2, E1, E2")
>>> eq1 = Eqn(P, P1 + P2)
>>> eq2 = Eqn(P1 / (A1 * E1), P2 / (A2 * E2))
>>> P1_val = (eq1 - P2).swap
>>> P1_val
Equation(P1, P - P2)
>>> eq2 = eq2.subs(P1_val)
>>> eq2
Equation((P - P2)/(A1*E1), P2/(A2*E2))
>>> P2_val = solve(eq2.subs(P1_val), P2).args[0]
>>> P2_val
Equation(P2, A2*E2*P/(A1*E1 + A2*E2))
Combining equations (Math with equations: lhs with lhs and rhs with rhs)
>>> q = Eqn(a*c, b/c**2)
>>> q
Equation(a*c, b/c**2)
>>> t
Equation(a, b/c)
>>> q+t
Equation(a*c + a, b/c + b/c**2)
>>> q/t
Equation(c, 1/c)
>>> t**q
Equation(a**(a*c), (b/c)**(b/c**2))
Utility operations
>>> t.reversed
Equation(b/c, a)
>>> t.swap
Equation(b/c, a)
>>> t.lhs
a
>>> t.rhs
b/c
>>> t.as_Boolean()
Eq(a, b/c)
`.check()` convenience method for `.as_Boolean().simplify()`
>>> from sympy import I, pi
>>> Equation(pi*(I+2), pi*I+2*pi).check()
True
>>> Eqn(a,a+1).check()
False
Differentiation
Differentiation is applied to both sides if the wrt variable appears on
both sides.
>>> q=Eqn(a*c, b/c**2)
>>> q
Equation(a*c, b/c**2)
>>> diff(q,b)
Equation(Derivative(a*c, b), c**(-2))
>>> diff(q,c)
Equation(a, -2*b/c**3)
>>> diff(log(q),b)
Equation(Derivative(log(a*c), b), 1/b)
>>> diff(q,c,2)
Equation(Derivative(a, c), 6*b/c**4)
If you specify multiple differentiations all at once, the assumption
is that the order of differentiation matters and the lhs will not be
evaluated.
>>> diff(q,c,b)
Equation(Derivative(a*c, b, c), -2/c**3)
To overcome this specify the order of operations.
>>> diff(diff(q,c),b)
Equation(Derivative(a, b), -2/c**3)
But the reverse order returns an unevaluated lhs (a may depend on b).
>>> diff(diff(q,b),c)
Equation(Derivative(a*c, b, c), -2/c**3)
Integration can only be performed on one side at a time.
>>> q=Eqn(a*c,b/c)
>>> integrate(q,b,side='rhs')
b**2/(2*c)
>>> integrate(q,b,side='lhs')
a*b*c
Make a pretty statement of integration from an equation
>>> Eqn(Integral(q.lhs,b),integrate(q,b,side='rhs'))
Equation(Integral(a*c, b), b**2/(2*c))
Integration of each side with respect to different variables
>>> q.dorhs.integrate(b).dolhs.integrate(a)
Equation(a**2*c/2, b**2/(2*c))
Automatic solutions using sympy solvers. THIS IS EXPERIMENTAL. Please
report issues at https://github.com/gutow/Algebra_with_Sympy/issues.
>>> tosolv = Eqn(a - b, c/a)
>>> solve(tosolv,a)
FiniteSet(Equation(a, b/2 - sqrt(b**2 + 4*c)/2), Equation(a, b/2 + sqrt(b**2 + 4*c)/2))
>>> solve(tosolv, b)
FiniteSet(Equation(b, (a**2 - c)/a))
>>> solve(tosolv, c)
FiniteSet(Equation(c, a**2 - a*b))
"""
def __new__(cls, lhs, rhs, **kwargs):
lhs = _sympify(lhs)
rhs = _sympify(rhs)
if not isinstance(lhs, Expr) or not isinstance(rhs, Expr):
raise TypeError('lhs and rhs must be valid sympy expressions.')
return super().__new__(cls, lhs, rhs)
def _get_eqn_name(self):
"""
Tries to find the python string name that refers to the equation. In
IPython environments (IPython, Jupyter, etc...) looks in the user_ns.
If not in an IPython environment looks in __main__.
:return: string value if found or empty string.
"""
human_text = algwsym_config.output.human_text
algwsym_config.output.human_text=False
import __main__ as shell
for k in dir(shell):
item = getattr(shell,k)
if isinstance(item,Equation):
if item.__repr__()==self.__repr__() and not \
k.startswith('_'):
algwsym_config.output.human_text=human_text
return k
algwsym_config.output.human_text = human_text
return ''
@property
def lhs(self):
"""
Returns the lhs of the equation.
"""
return self.args[0]
@property
def rhs(self):
"""
Returns the rhs of the equation.
"""
return self.args[1]
def as_Boolean(self):
"""
Converts the equation to an Equality.
"""
return Equality(self.lhs, self.rhs)
def check(self, **kwargs):
"""
Forces simplification and casts as `Equality` to check validity.
Parameters
----------
kwargs any appropriate for `Equality`.
Returns
-------
True, False or an unevaluated `Equality` if truth cannot be determined.
"""
return Equality(self.lhs, self.rhs, **kwargs).simplify()
@property
def reversed(self):
"""
Swaps the lhs and the rhs.
"""
return Equation(self.rhs, self.lhs)
@property
def swap(self):
"""
Synonym for `.reversed`
"""
return self.reversed
def _applytoexpr(self, expr, func, *args, **kwargs):
# Applies a function to an expression checking whether there
# is a specialized version associated with the particular type of
# expression. Errors will be raised if the function cannot be
# applied to an expression.
funcname = getattr(func, '__name__', None)
if funcname is not None:
localfunc = getattr(expr, funcname, None)
if localfunc is not None:
return localfunc(*args, **kwargs)
return func(expr, *args, **kwargs)
def apply(self, func, *args, side='both', **kwargs):
"""
Apply an operation/function/method to the equation returning the
resulting equation.
Parameters
==========
func: object
object to apply, usually a function
args: as necessary for the function
side: 'both', 'lhs', 'rhs', optional
Specifies which side of the equation the operation will be applied
to. Default is 'both'.
kwargs: as necessary for the function
"""
lhs = self.lhs
rhs = self.rhs
if side in ('both', 'lhs'):
lhs = self._applytoexpr(self.lhs, func, *args, **kwargs)
if side in ('both', 'rhs'):
rhs = self._applytoexpr(self.rhs, func, *args, **kwargs)
return Equation(lhs, rhs)
def applylhs(self, func, *args, **kwargs):
"""
If the lhs of the equation has a defined subfunction (attribute) named
``func``, that will be applied instead of the global function.
The operation is applied to only the lhs.
"""
return self.apply(func, *args, **kwargs, side='lhs')
def applyrhs(self, func, *args, **kwargs):
"""
If the rhs of the equation has a defined subfunction (attribute) named
``func``, that will be applied instead of the global function.
The operation is applied to only the rhs.
"""
return self.apply(func, *args, **kwargs, side='rhs')
class _sides:
"""
Helper class for the `.do.`, `.dolhs.`, `.dorhs.` syntax for applying
submethods of expressions.
"""
def __init__(self, eqn, side='both'):
self.eqn = eqn
self.side = side
def __getattr__(self, name):
func = None
if self.side in ('rhs', 'both'):
func = getattr(self.eqn.rhs, name, None)
else:
func = getattr(self.eqn.lhs, name, None)
if func is None:
raise AttributeError('Expressions in the equation have no '
'attribute `' + str(
name) + '`. Try `.apply('
+ str(name) + ', *args)` or '
'pass the equation as a parameter to `'
+ str(name) + '()`.')
return functools.partial(self.eqn.apply, func, side=self.side)
@property
def do(self):
return self._sides(self, side='both')
@property
def dolhs(self):
return self._sides(self, side='lhs')
@property
def dorhs(self):
return self._sides(self, side='rhs')
def _eval_rewrite(self, rule, args, **kwargs):
"""Return Equation(L, R) as Equation(L - R, 0) or as L - R.
Parameters
==========
evaluate : bool, optional
Control the evaluation of the result. If `evaluate=None` then
terms in L and R will not cancel but they will be listed in
canonical order; otherwise non-canonical args will be returned.
Default to True.
eqn : bool, optional
Control the returned type. If `eqn=True`, then Equation(L - R, 0)
is returned. Otherwise, the L - R symbolic expression is returned.
Default to True.
Examples
========
>>> from sympy import Add
>>> from sympy.abc import b, x
>>> from algebra_with_sympy import Equation
>>> eq = Equation(x + b, x - b)
>>> eq.rewrite(Add)
Equation(2*b, 0)
>>> eq.rewrite(Add, evaluate=None).lhs.args
(b, b, x, -x)
>>> eq.rewrite(Add, evaluate=False).lhs.args
(b, x, b, -x)
>>> eq.rewrite(Add, eqn=False)
2*b
>>> eq.rewrite(Add, eqn=False, evaluate=False).args
(b, x, b, -x)
"""
if rule == Add:
# NOTE: the code about `evaluate` is very similar to
# sympy.core.relational.Equality._eval_rewrite_as_Add
eqn = kwargs.pop("eqn", True)
evaluate = kwargs.get('evaluate', True)
L, R = args
if evaluate:
# allow cancellation of args
expr = L - R
else:
args = Add.make_args(L) + Add.make_args(-R)
if evaluate is None:
# no cancellation, but canonical
expr = _unevaluated_Add(*args)
else:
# no cancellation, not canonical
expr = Add._from_args(args)
if eqn:
return self.func(expr, 0)
return expr
def subs(self, *args, **kwargs):
"""Substitutes old for new in an equation after sympifying args.
`args` is either:
* one or more arguments of type `Equation(old, new)`.
* two arguments, e.g. foo.subs(old, new)
* one iterable argument, e.g. foo.subs(iterable). The iterable may be:
- an iterable container with (old, new) pairs. In this case the
replacements are processed in the order given with successive
patterns possibly affecting replacements already made.
- a dict or set whose key/value items correspond to old/new pairs.
In this case the old/new pairs will be sorted by op count and in
case of a tie, by number of args and the default_sort_key. The
resulting sorted list is then processed as an iterable container
(see previous).
If the keyword ``simultaneous`` is True, the subexpressions will not be
evaluated until all the substitutions have been made.
Please, read ``help(Expr.subs)`` for more examples.
Examples
========
>>> from sympy.abc import a, b, c, x
>>> from algebra_with_sympy import Equation
>>> eq = Equation(x + a, b * c)
Substitute a single value:
>>> eq.subs(b, 4)
Equation(a + x, 4*c)
Substitute multiple values:
>>> eq.subs([(a, 2), (b, 4)])
Equation(x + 2, 4*c)
>>> eq.subs({a: 2, b: 4})
Equation(x + 2, 4*c)
Substitute an equation into another equation:
>>> eq2 = Equation(x + a, 4)
>>> eq.subs(eq2)
Equation(4, b*c)
Substitute multiple equations into another equation:
>>> eq1 = Equation(x + a + b + c, x * a * b * c)
>>> eq2 = Equation(x + a, 4)
>>> eq3 = Equation(b, 5)
>>> eq1.subs(eq2, eq3)
Equation(c + 9, 5*a*c*x)
"""
new_args = args
if all(isinstance(a, self.func) for a in args):
new_args = [{a.args[0]: a.args[1] for a in args}]
elif (len(args) == 1) and all(isinstance(a, self.func) for a in
args[0]):
raise TypeError("You passed into `subs` a list of elements of "
"type `Equation`, but this is not supported. Please, consider "
"unpacking the list with `.subs(*eq_list)` or select your "
"equations from the list and use `.subs(eq_list[0], eq_list["
"2], ...)`.")
elif any(isinstance(a, self.func) for a in args):
raise ValueError("`args` contains one or more Equation and some "
"other data type. This mode of operation is not supported. "
"Please, read `subs` documentation to understand how to "
"use it.")
return super().subs(*new_args, **kwargs)
#####
# Overrides of binary math operations
#####
@classmethod
def _binary_op(cls, a, b, opfunc_ab):
if isinstance(a, Equation) and not isinstance(b, Equation):
return Equation(opfunc_ab(a.lhs, b), opfunc_ab(a.rhs, b))
elif isinstance(b, Equation) and not isinstance(a, Equation):
return Equation(opfunc_ab(a, b.lhs), opfunc_ab(a, b.rhs))
elif isinstance(a, Equation) and isinstance(b, Equation):
return Equation(opfunc_ab(a.lhs, b.lhs), opfunc_ab(a.rhs, b.rhs))
else:
return NotImplemented
def __add__(self, other):
return self._binary_op(self, other, lambda a, b: a + b)
def __radd__(self, other):
return self._binary_op(other, self, lambda a, b: a + b)
def __mul__(self, other):
return self._binary_op(self, other, lambda a, b: a * b)
def __rmul__(self, other):
return self._binary_op(other, self, lambda a, b: a * b)
def __sub__(self, other):
return self._binary_op(self, other, lambda a, b: a - b)
def __rsub__(self, other):
return self._binary_op(other, self, lambda a, b: a - b)
def __truediv__(self, other):
return self._binary_op(self, other, lambda a, b: a / b)
def __rtruediv__(self, other):
return self._binary_op(other, self, lambda a, b: a / b)
def __mod__(self, other):
return self._binary_op(self, other, lambda a, b: a % b)
def __rmod__(self, other):
return self._binary_op(other, self, lambda a, b: a % b)
def __pow__(self, other):
return self._binary_op(self, other, lambda a, b: a ** b)
def __rpow__(self, other):
return self._binary_op(other, self, lambda a, b: a ** b)
def _eval_power(self, other):
return self.__pow__(other)
#####
# Operation helper functions
#####
def expand(self, *args, **kwargs):
return Equation(self.lhs.expand(*args, **kwargs), self.rhs.expand(
*args, **kwargs))
def simplify(self, *args, **kwargs):
return self._eval_simplify(*args, **kwargs)
def _eval_simplify(self, *args, **kwargs):
return Equation(self.lhs.simplify(*args, **kwargs), self.rhs.simplify(
*args, **kwargs))
def _eval_factor(self, *args, **kwargs):
# TODO: cancel out factors common to both sides.
return Equation(self.lhs.factor(*args, **kwargs), self.rhs.factor(
*args, **kwargs))
def factor(self, *args, **kwargs):
return self._eval_factor(*args, **kwargs)
def _eval_collect(self, *args, **kwargs):
from sympy.simplify.radsimp import collect
return Equation(collect(self.lhs, *args, **kwargs),
collect(self.rhs, *args, **kwargs))
def collect(self, *args, **kwargs):
return self._eval_collect(*args, **kwargs)
def evalf(self, *args, **kwargs):
return Equation(self.lhs.evalf(*args, **kwargs),
self.rhs.evalf(*args, **kwargs))
n = evalf
def _eval_derivative(self, *args, **kwargs):
# TODO Find why diff and Derivative do not appear to pass through
# kwargs to this. Since we cannot set evaluation of lhs manually
# try to be intelligent about when to do it.
from sympy.core.function import Derivative
eval_lhs = False
if not (isinstance(self.lhs, Derivative)):
for sym in args:
if sym in self.lhs.free_symbols and not (
_sympify(sym).is_number):
eval_lhs = True
return Equation(self.lhs.diff(*args, **kwargs, evaluate=eval_lhs),
self.rhs.diff(*args, **kwargs))
def _eval_Integral(self, *args, **kwargs):
side = kwargs.pop('side', None) # Could not seem to pass values for
# `evaluate` through to here.
if side is None:
raise ValueError('You must specify `side="lhs"` or `side="rhs"` '
'when integrating an Equation')
else:
try:
return (getattr(self, side).integrate(*args, **kwargs))
except AttributeError:
raise AttributeError('`side` must equal "lhs" or "rhs".')
#####
# Output helper functions
#####
def __repr__(self):
repstr = 'Equation(%s, %s)' %(self.lhs.__repr__(), self.rhs.__repr__())
# if algwsym_config.output.human_text:
# return self.__str__()
return repstr
def _latex(self, printer):
tempstr = ''
"""
if algwsym_config.output.show_code and not \
algwsym_config.output.human_text:
print('code version: '+ self.__repr__())
"""
tempstr += printer._print(self.lhs)
tempstr += '='
tempstr += printer._print(self.rhs)
namestr = self._get_eqn_name()
if namestr !='' and algwsym_config.output.label:
tempstr += '\\,\\,\\,\\,\\,\\,\\,\\,\\,\\,'
tempstr += '(\\text{'+namestr+'})'
return tempstr
def __str__(self):
tempstr = ''
# if algwsym_config.output.show_code:
# human_text = algwsym_config.output.human_text
# algwsym_config.output.human_text=False
# tempstr += '\ncode version: '+self.__repr__() +'\n'
# algwsym_config.output.human_text=human_text
tempstr += str(self.lhs) + ' = ' + str(self.rhs)
namestr = self._get_eqn_name()
if namestr != '' and algwsym_config.output.label:
tempstr += ' (' + namestr + ')'
return tempstr
Eqn = Equation
if ip and "text/latex" not in formatter.active_types:
old = formatter.formatters['text/plain'].for_type(Eqn,
__command_line_printing__)
# print("For type Equation overriding plain text formatter = " + str(old))
def solve(f, *symbols, **flags):
"""
Override of sympy `solve()`.
If passed an expression and variable(s) to solve for it behaves
almost the same as normal solve with `dict = True`, except that solutions
are wrapped in a FiniteSet() to guarantee that the output will be pretty
printed in Jupyter like environments.
If passed an equation or equations it returns solutions as a
`FiniteSet()` of solutions, where each solution is represented by an
equation or set of equations.
To get a Python `list` of solutions (pre-0.11.0 behavior) rather than a
`FiniteSet` issue the command `algwsym_config.output.solve_to_list = True`.
This also prevents pretty-printing in IPython and Jupyter.
Examples
--------
>>> a, b, c, x, y = symbols('a b c x y', real = True)
>>> import sys
>>> sys.displayhook = __command_line_printing__ # set by default on normal initialization.
>>> eq1 = Eqn(abs(2*x+y),3)
>>> eq2 = Eqn(abs(x + 2*y),3)
>>> B = solve((eq1,eq2))
Default human readable output on command line
>>> B
{{x = -3, y = 3}, {x = -1, y = -1}, {x = 1, y = 1}, {x = 3, y = -3}}
To get raw output turn off by setting
>>> algwsym_config.output.human_text=False
>>> B
FiniteSet(FiniteSet(Equation(x, -3), Equation(y, 3)), FiniteSet(Equation(x, -1), Equation(y, -1)), FiniteSet(Equation(x, 1), Equation(y, 1)), FiniteSet(Equation(x, 3), Equation(y, -3)))
Pre-0.11.0 behavior where a python list of solutions is returned
>>> algwsym_config.output.solve_to_list = True
>>> solve((eq1,eq2))
[[Equation(x, -3), Equation(y, 3)], [Equation(x, -1), Equation(y, -1)], [Equation(x, 1), Equation(y, 1)], [Equation(x, 3), Equation(y, -3)]]
>>> algwsym_config.output.solve_to_list = False # reset to default
`algwsym_config.output.human_text = True` with
`algwsym_config.output.show_code=True` shows both.
In Jupyter-like environments `show_code=True` yields the Raw output and
a typeset version. If `show_code=False` (the default) only the
typeset version is shown in Jupyter.
>>> algwsym_config.output.show_code=True
>>> algwsym_config.output.human_text=True
>>> B
Code version: FiniteSet(FiniteSet(Equation(x, -3), Equation(y, 3)), FiniteSet(Equation(x, -1), Equation(y, -1)), FiniteSet(Equation(x, 1), Equation(y, 1)), FiniteSet(Equation(x, 3), Equation(y, -3)))
{{x = -3, y = 3}, {x = -1, y = -1}, {x = 1, y = 1}, {x = 3, y = -3}}
"""
from sympy.solvers.solvers import solve
from sympy.sets.sets import FiniteSet
from IPython.display import display
newf =[]
solns = []
displaysolns = []
contains_eqn = False
if hasattr(f,'__iter__'):
for k in f:
if isinstance(k, Equation):
newf.append(k.lhs-k.rhs)
contains_eqn = True
else:
newf.append(k)
else:
if isinstance(f, Equation):
newf.append(f.lhs - f.rhs)
contains_eqn = True
else:
newf.append(f)
flags['dict'] = True
result = solve(newf, *symbols, **flags)
if contains_eqn:
if len(result[0]) == 1:
for k in result:
for key in k.keys():
val = k[key]
tempeqn = Eqn(key, val)
solns.append(tempeqn)
else:
for k in result:
solnset = []
for key in k.keys():
val = k[key]
tempeqn = Eqn(key, val)
solnset.append(tempeqn)
if not algwsym_config.output.solve_to_list:
solnset = FiniteSet(*solnset)
solns.append(solnset)
else:
solns = result
if algwsym_config.output.solve_to_list:
return list(solns)
else:
return FiniteSet(*solns)
def solveset(f, symbols, domain=sympy.Complexes):
"""
Very experimental override of sympy solveset, which we hope will replace
solve. Much is not working. It is not clear how to input a system of
equations unless you directly select `linsolve`, etc...
"""
from sympy.solvers import solveset as solve
from IPython.display import display
newf = []
solns = []
displaysolns = []
contains_eqn = False
if hasattr(f, '__iter__'):
for k in f:
if isinstance(k, Equation):
newf.append(k.lhs - k.rhs)
contains_eqn = True
else:
newf.append(k)
else:
if isinstance(f, Equation):
newf.append(f.lhs - f.rhs)
contains_eqn = True
else:
newf.append(f)
result = solve(*newf, symbols, domain=domain)
# if contains_eqn:
# if len(result[0]) == 1:
# for k in result:
# for key in k.keys():
# val = k[key]
# tempeqn = Eqn(key, val)
# solns.append(tempeqn)
# display(*solns)
# else:
# for k in result:
# solnset = []
# displayset = []
# for key in k.keys():
# val = k[key]
# tempeqn = Eqn(key, val)
# solnset.append(tempeqn)
# if algwsym_config.output.show_solve_output:
# displayset.append(tempeqn)
# if algwsym_config.output.show_solve_output:
# displayset.append('-----')
# solns.append(solnset)
# if algwsym_config.output.show_solve_output:
# for k in displayset:
# displaysolns.append(k)
# if algwsym_config.output.show_solve_output:
# display(*displaysolns)
# else:
solns = result
return solns
def sqrt(arg, evaluate = None):
"""
Override of sympy convenience function `sqrt`. Simply divides equations
into two sides if `arg` is an instance of `Equation`. This avoids an
issue with the way sympy is delaying specialized applications of _Pow_ on
objects that are not basic sympy expressions.
"""
from sympy.functions.elementary.miscellaneous import sqrt as symsqrt
if isinstance(arg, Equation):
return Equation(symsqrt(arg.lhs, evaluate), symsqrt(arg.rhs, evaluate))
else:
return symsqrt(arg,evaluate)
# Pick up the docstring for sqrt from sympy
from sympy.functions.elementary.miscellaneous import sqrt as symsqrt
sqrt.__doc__+=symsqrt.__doc__
del symsqrt
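# Minimal sketch of the override, assuming `a, b = var('a b')` and raw
# (`repr`) output:
#
#   >>> sqrt(Eqn(a**2, b))
#   Equation(sqrt(a**2), sqrt(b))
#
# Each side receives the square root independently, sidestepping the delayed
# _Pow_ dispatch issue described above.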
def root(arg, n, k = 0, evaluate = None):
"""
Override of sympy convenience function `root`. Simply divides equations
into two sides if `arg` or `n` is an instance of `Equation`. This
avoids an issue with the way sympy is delaying specialized applications
of _Pow_ on objects that are not basic sympy expressions.
"""
from sympy.functions.elementary.miscellaneous import root as symroot
if isinstance(arg, Equation):
return Equation(symroot(arg.lhs, n, k, evaluate),
symroot(arg.rhs, n, k, evaluate))
if isinstance(n, Equation):
return Equation(symroot(arg, n.lhs, k, evaluate),
symroot(arg, n.rhs, k, evaluate))
else:
return symroot(arg, n, k, evaluate)
# pick up the docstring for root from sympy
from sympy.functions.elementary.miscellaneous import root as symroot
root.__doc__+=symroot.__doc__
del symroot
def Heaviside(arg, **kwargs):
"""
Override of the Heaviside function as implemented in Sympy. We get a
recursion error if we use the normal class extension of a function to do this.
"""
from sympy.functions.special.delta_functions import Heaviside as symHeav
if isinstance(arg, Equation):
return Equation(symHeav((arg.lhs), **kwargs),symHeav((arg.rhs),
**kwargs))
else:
return symHeav(arg, **kwargs)
# Pick up the docstring for Heaviside from Sympy.
from sympy.functions.special.delta_functions import Heaviside as symHeav
Heaviside.__doc__ += symHeav.__doc__
del symHeav
def collect(expr, syms, func=None, evaluate=None, exact=False,
distribute_order_term=True):
"""
Override of sympy `collect()`.
"""
from sympy.simplify.radsimp import collect
_eval_collect = getattr(expr, '_eval_collect', None)
if _eval_collect is not None:
return _eval_collect(syms, func, evaluate,
exact, distribute_order_term)
else:
return collect(expr, syms, func, evaluate, exact,
distribute_order_term)
class Equality(Equality):
"""
Extension of Equality class to include the ability to convert it to an
Equation.
"""
def to_Equation(self):
"""
Return: recasts the Equality as an Equation.
"""
return Equation(self.lhs,self.rhs)
def to_Eqn(self):
"""
Synonym for to_Equation.
Return: recasts the Equality as an Equation.
"""
return self.to_Equation()
Eq = Equality
def __FiniteSet__repr__override__(self):
"""Override of the `FiniteSet.__repr__(self)` to overcome sympy's
inconsistent wrapping of Finite Sets which prevents reliable use of
copy and paste of the code representation.
"""
insidestr = ""
for k in self.args:
insidestr += k.__repr__() +', '
insidestr = insidestr[:-2]
reprstr = "FiniteSet("+ insidestr + ")"
return reprstr
sympy.sets.FiniteSet.__repr__ = __FiniteSet__repr__override__
def __FiniteSet__str__override__(self):
"""Override of the `FiniteSet.__str__(self)` to overcome sympy's
inconsistent wrapping of Finite Sets which prevents reliable use of
copy and paste of the code representation.
"""
insidestr = ""
for k in self.args:
insidestr += str(k) + ', '
insidestr = insidestr[:-2]
strrep = "{"+ insidestr + "}"
return strrep
sympy.sets.FiniteSet.__str__ = __FiniteSet__str__override__
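# Sketch of the combined effect of the two overrides above:
#
#   >>> from sympy import FiniteSet, symbols
#   >>> x, y = symbols('x y')
#   >>> s = FiniteSet(x, y)
#   >>> repr(s)   # copy-pasteable code form
#   'FiniteSet(x, y)'
#   >>> str(s)    # human-readable form
#   '{x, y}'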
#####
# Extension of the Function class. For incorporation into SymPy this should
# become part of the class
#####
class EqnFunction(Function):
"""
Extension of the sympy Function class to understand equations. Each
sympy function impacted by this extension is listed in the documentation
that follows.
"""
def __new__(cls, *args, **kwargs):
n = len(args)
eqnloc = None
neqns = 0
newargs = []
for k in args:
newargs.append(k)
if (n > 0):
for i in range(n):
if isinstance(args[i], Equation):
neqns += 1
eqnloc = i
if neqns > 1:
raise NotImplementedError('Function calls with more than one '
'Equation as a parameter are not '
'supported. You may be able to get '
'your desired outcome using .applyrhs'
' and .applylhs.')
if neqns == 1:
newargs[eqnloc] = args[eqnloc].lhs
lhs = super().__new__(cls, *newargs, **kwargs)
newargs[eqnloc] = args[eqnloc].rhs
rhs = super().__new__(cls, *newargs, **kwargs)
return Equation(lhs,rhs)
return super().__new__(cls, *args, **kwargs)
def str_to_extend_sympy_func(func:str):
"""
Generates the string command to execute for a sympy function to
gain the properties of the extended EqnFunction class.
"""
execstr = 'class ' + str(func) + '(' + str(
func) + ',EqnFunction):\n ' \
'pass\n'
return execstr
# TODO: Below will not be needed when incorporated into SymPy.
# This is hacky, but I have not been able to come up with another way
# of extending the functions programmatically, given that this is a separate
# package from sympy that extends it after loading sympy.
# Functions listed in `skip` are not applicable to equations or cannot be
# extended because of `mro` error or `metaclass conflict`. This reflects
# that some of these are not members of the Sympy Function class.
# Overridden elsewhere
_extended_ = ('sqrt', 'root', 'Heaviside')
# Either not applicable to equations or have not yet figured out a way
# to systematically apply to an equation.
# TODO examine these more carefully (top priority: real_root, cbrt, Ynm_c).
_not_applicable_to_equations_ = ('Min', 'Max', 'Id', 'real_root', 'cbrt',
'unbranched_argument', 'polarify', 'unpolarify',
'piecewise_fold', 'E1', 'Eijk', 'bspline_basis',
'bspline_basis_set', 'interpolating_spline', 'jn_zeros',
'jacobi_normalized', 'Ynm_c', 'piecewise_exclusive', 'Piecewise',
'motzkin', 'hyper','meijerg', 'chebyshevu_root', 'chebyshevt_root',
'betainc_regularized')
_skip_ = _extended_ + _not_applicable_to_equations_
for func in functions.__all__:
if func not in _skip_:
try:
exec(str_to_extend_sympy_func(func), globals(), locals())
except TypeError:
from warnings import warn
warn('SymPy function/operation ' + str(func) + ' may not work ' \
'properly with Equations. If you use it with Equations, ' \
'validate its behavior. We are working to address this ' \
'issue.')
# Redirect python abs() to Abs()
abs = Abs
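# Sketch of the extension loop's effect, assuming `a, b, c = var('a b c')`
# and raw (`repr`) output: ordinary sympy functions now act on both sides
# of an Equation.
#
#   >>> sin(Eqn(a, b/c))
#   Equation(sin(a), sin(b/c))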
|
Algebra-with-SymPy
|
/Algebra_with_SymPy-0.12.0.tar.gz/Algebra_with_SymPy-0.12.0/algebra_with_sympy/algebraic_equation.py
|
algebraic_equation.py
|
from typing import Union
import lab as B
from plum import Dispatcher
__all__ = ["squeeze", "get_subclasses", "broadcast", "identical", "to_tensor"]
_dispatch = Dispatcher()
def squeeze(xs):
"""Squeeze a sequence if it only contains a single element.
Args:
xs (sequence): Sequence to squeeze.
Returns:
object: `xs[0]` if `xs` consists of a single element and `xs` otherwise.
"""
return xs[0] if len(xs) == 1 else xs
def get_subclasses(c):
"""Get all subclasses of a class.
Args:
c (type): Class to get subclasses of.
Returns:
list[type]: List of subclasses of `c`.
"""
scs = c.__subclasses__()
return scs + [x for sc in scs for x in get_subclasses(sc)]
def broadcast(op, xs, ys):
"""Perform a binary operation `op` on elements of `xs` and `ys`. If `xs` or
`ys` has length 1, then it is repeated sufficiently many times to match the
length of the other.
Args:
op (function): Binary operation.
xs (sequence): First sequence.
ys (sequence): Second sequence.
Returns:
tuple: Result of applying `op` to every element of `zip(xs, ys)` after
broadcasting appropriately.
"""
if len(xs) == 1 and len(ys) > 1:
# Broadcast `xs`.
xs = xs * len(ys)
elif len(ys) == 1 and len(xs) > 1:
# Broadcast `ys`.
ys = ys * len(xs)
# Check that `xs` and `ys` are compatible now.
if len(xs) != len(ys):
raise ValueError(f'Inputs "{xs}" and "{ys}" could not be broadcasted.')
# Perform operation.
return tuple(op(x, y) for x, y in zip(xs, ys))
@_dispatch
def identical(x, y):
"""Check if two objects `x` are `y` are identical for the purpose of algebraic
simplification.
Args:
x (object): First object.
y (object): Second object.
Returns:
bool: `x` and `y` are identical.
"""
return x is y
@_dispatch
def identical(x: Union[int, float], y: Union[int, float]):
return x == y
@_dispatch.multi((tuple, tuple), (list, list))
def identical(x: Union[tuple, list], y: Union[tuple, list]):
return len(x) == len(y) and all([identical(xi, yi) for xi, yi in zip(x, y)])
@_dispatch
def to_tensor(x: B.Numeric):
"""Convert object to tensor.
Args:
x (object): Object to convert to tensor.
Returns:
tensor: `x` as a tensor.
"""
return x
@_dispatch
def to_tensor(x: Union[tuple, list]):
return B.stack(*x, axis=0)
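# Usage sketch for the helpers above (`operator.add` is just an example
# binary op):
#
#   >>> import operator
#   >>> broadcast(operator.add, (1,), (10, 20, 30))
#   (11, 21, 31)
#   >>> squeeze((5,))
#   5
#   >>> identical(1.0, 1)
#   True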
|
Algebra2x
|
/Algebra2x-0.1-py3-none-any.whl/algebra2x/util.py
|
util.py
|
from . import _dispatch
from .algebra import Element, One, Zero, Wrapped, Join
from .ops.add import Sum
from .ops.mul import Scaled, Product
__all__ = [
"Function",
"OneFunction",
"ZeroFunction",
"WrappedFunction",
"ScaledFunction",
"JoinFunction",
"SumFunction",
"ProductFunction",
"stretch",
"shift",
"select",
"transform",
"diff",
"reverse",
]
class Function(Element):
"""A elements."""
def stretch(self, *stretches):
"""Stretch the elements.
Args:
*stretches (tensor): Per input, extent to stretch by.
Returns:
:class:`.elements.Function`: Stretched function.
"""
return stretch(self, *stretches)
def shift(self, *amounts):
"""Shift the inputs of an elements by a certain amount.
Args:
*amounts (tensor): Per input, amount to shift by.
Returns:
:class:`.elements.Function`: Shifted function.
"""
return shift(self, *amounts)
def select(self, *dims):
"""Select particular dimensions of the input features.
Args:
*dims (int, sequence, or None): Per input, dimensions to select.
Set to `None` to select all.
Returns:
:class:`.elements.Function`: Function with dimensions of the
input features selected.
"""
return select(self, *dims)
def transform(self, *fs):
"""Transform the inputs of a elements.
Args:
*fs (function or None): Per input, transformation. Set to `None` to
not perform a transformation.
Returns:
:class:`.elements.Function`: Function with its inputs
transformed.
"""
return transform(self, *fs)
def diff(self, *derivs):
"""Differentiate a elements.
Args:
*derivs (int): Per input, dimension of the feature which to take
the derivatives with respect to. Set to `None` to not take a
derivative.
Returns:
:class:`.elements.Function`: Derivative of the Function.
"""
return diff(self, *derivs)
def __reversed__(self):
"""Reverse the arguments of a elements.
Returns:
:class:`.elements.Function`: Function with arguments reversed.
"""
return reverse(self)
# Register the algebra.
@_dispatch
def get_algebra(a: Function):
return Function
class OneFunction(Function, One):
"""The constant function `1`."""
class ZeroFunction(Function, Zero):
"""The constant function `0`."""
class WrappedFunction(Function, Wrapped):
"""A wrapped function."""
class ScaledFunction(Function, Scaled):
"""A scaled function."""
class JoinFunction(Function, Join):
"""Two wrapped functions."""
class SumFunction(Function, Sum):
"""A sum of two functions."""
class ProductFunction(Function, Product):
"""A product of two functions."""
@_dispatch
def stretch(a, *stretches):
"""Stretch a elements.
Args:
a (:class:`.elements.Function`): Function to stretch.
*stretches (tensor): Per input, extent of stretches.
Returns:
:class:`.elements.Function`: Stretched function.
"""
raise NotImplementedError(f'Stretching not implemented for "{type(a).__name__}".')
@_dispatch
def shift(a, *shifts):
"""Shift a elements.
Args:
a (:class:`.elements.Function`): Function to shift.
*shifts (tensor): Per input, amount of shift.
Returns:
:class:`.elements.Function`: Shifted function.
"""
raise NotImplementedError(f'Shifting not implemented for "{type(a).__name__}".')
@_dispatch
def select(a, *dims):
"""Select dimensions from the inputs.
Args:
a (:class:`.elements.Function`): Function to wrap.
*dims (int): Per input, dimensions to select. Set to `None` to select
all.
Returns:
:class:`.elements.Function`: Function with particular dimensions
from the inputs selected.
"""
raise NotImplementedError(f'Selection not implemented for "{type(a).__name__}".')
@_dispatch
def transform(a, *fs):
"""Transform the inputs of a elements.
Args:
a (:class:`.elements.Function`): Function to wrap.
*fs (int): Per input, the transform. Set to `None` to not perform a
transform.
Returns:
:class:`.elements.Function`: Function with its inputs
transformed.
"""
raise NotImplementedError(
f'Input transforms not implemented for "{type(a).__name__}".'
)
@_dispatch
def diff(a, *derivs):
"""Differentiate a elements.
Args:
a (:class:`.elements.Function`): Function to differentiate.
*derivs (int): Per input, dimension of the feature which to take
the derivatives with respect to. Set to `None` to not take a
derivative.
Returns:
:class:`.elements.Function`: Derivative of the function.
"""
raise NotImplementedError(
f'Differentiation not implemented for "{type(a).__name__}".'
)
@_dispatch
def reverse(a):
"""Reverse argument of a elements.
Args:
a (:class:`.elements.Function`): Function to reverse arguments of.
Returns:
:class:`.elements.Function`: Function with arguments reversed.
"""
raise NotImplementedError(
f'Argument reversal not implemented for "{type(a).__name__}".'
)
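# Sketch: each generic fallback above raises until an ops module registers a
# concrete method, so applying an operation to an unsupported object fails
# loudly:
#
#   >>> stretch("not-an-element", 2)
#   Traceback (most recent call last):
#       ...
#   NotImplementedError: Stretching not implemented for "str".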
|
Algebra2x
|
/Algebra2x-0.1-py3-none-any.whl/algebra2x/function.py
|
function.py
|
from abc import ABCMeta, abstractmethod
from . import _dispatch
from .util import get_subclasses
__all__ = [
"proven",
"Element",
"One",
"Zero",
"Wrapped",
"Join",
"pretty_print",
"add",
"mul",
"get_algebra",
"new",
]
_proven_level = 10 #: Current precedence level for proven methods.
def proven():
"""Generate a method precedence level for proven methods. Proven methods
should be such that any applicable one gives the same result, and in case
of ambiguity no particular proven method is preferred.
Returns:
int: Precedence level.
"""
global _proven_level
_proven_level += 1
return _proven_level
class Element(metaclass=ABCMeta):
"""An element in a algebra.
Elements can be added and multiplied.
"""
def __eq__(self, other):
return self is other
def __mul__(self, other):
return mul(self, other)
def __rmul__(self, other):
return mul(other, self)
def __add__(self, other):
return add(self, other)
def __radd__(self, other):
return add(other, self)
def __neg__(self):
return mul(-1, self)
def __sub__(self, other):
return add(self, -other)
def __rsub__(self, other):
return add(other, -self)
@_dispatch
def __pow__(self, power: int, modulo=None):
if power < 0:
raise ValueError("Cannot raise to a negative power.")
elif power == 0:
return 1
else:
k = self
for _ in range(power - 1):
k *= self
return k
@property
def num_terms(self):
"""Number of terms"""
return 1
def term(self, i):
"""Get a specific term.
Args:
i (int): Index of term.
Returns:
:class:`.algebra.Element`: The referenced term.
"""
if i == 0:
return self
else:
raise IndexError("Index out of range.")
@property
def num_factors(self):
"""Number of factors"""
return 1
def factor(self, i):
"""Get a specific factor.
Args:
i (int): Index of factor.
Returns:
:class:`.algebra.Element`: The referenced factor.
"""
if i == 0:
return self
else:
raise IndexError("Index out of range.")
@property
def __name__(self):
return self.__class__.__name__
def __repr__(self):
return self.display()
def __str__(self):
return self.display()
@_dispatch
def display(self, formatter):
"""Display the element.
Args:
formatter (object, optional): Function to format values.
Returns:
str: Element as a string.
"""
return pretty_print(self, formatter)
@_dispatch
def display(self):
return self.display(lambda x: x)
def render(self, formatter):
"""Render the element.
This is the lowest-level operation in pretty printing an element, and should
produce a string representation of the element. This method should be
implemented to determine how to render a custom element.
Args:
formatter (function, optional): Function to format values.
Returns:
str: Rendering of the element.
"""
return f"{self.__name__}()"
class One(Element):
"""The constant `1`."""
def render(self, formatter):
return "1"
@_dispatch
def __eq__(self, other: "One"):
return True
class Zero(Element):
"""The constant `0`."""
def render(self, formatter):
return "0"
@_dispatch
def __eq__(self, other: "Zero"):
return True
class Wrapped(Element):
"""A wrapped element.
Args:
e (:class:`.algebra.Element`): Element to wrap.
"""
def __init__(self, e):
self.e = e
def __getitem__(self, item):
if item == 0:
return self.e
else:
raise IndexError("Index out of range.")
@abstractmethod
def render_wrap(self, e, formatter): # pragma: no cover
pass
class Join(Element):
"""Two wrapped elements.
Args:
e1 (:class:`.algebra.Element`): First element to wrap.
e2 (:class:`.algebra.Element`): Second element to wrap.
"""
def __init__(self, e1, e2):
self.e1 = e1
self.e2 = e2
def __getitem__(self, item):
if item == 0:
return self.e1
elif item == 1:
return self.e2
else:
raise IndexError("Index out of range.")
@abstractmethod
def render_join(self, e1, e2, formatter): # pragma: no cover
pass
@_dispatch
def pretty_print(el: Element, formatter):
"""Pretty print an element with a minimal number of parentheses.
Args:
el (:class:`.algebra.Element`): Element to print.
formatter (object): Formatter for values.
Returns:
str: `el` converted to string prettily.
"""
return el.render(formatter)
@_dispatch
def add(a, b):
"""Add two elements.
Args:
a (:class:`.algebra.Element`): First element in addition.
b (:class:`.algebra.Element`): Second element in addition.
Returns:
:class:`.algebra.Element`: Sum of the elements.
"""
raise NotImplementedError(
f"Addition not implemented for "
f'"{type(a).__name__}" and "{type(b).__name__}".'
)
@_dispatch
def mul(a, b):
"""Multiply two elements.
Args:
a (:class:`.algebra.Element`): First element in product.
b (:class:`.algebra.Element`): Second element in product.
Returns:
:class:`.algebra.Element`: Product of the elements.
"""
raise NotImplementedError(
f"Multiplication not implemented for "
f'"{type(a).__name__}" and "{type(b).__name__}".'
)
@_dispatch
def get_algebra(a):
"""Get the algebra of an element.
Args:
a (:class:`.algebra.Element`): Element to get algebra of.
Returns:
type: Algebra of `a`.
"""
raise RuntimeError(f'Could not determine algebra type of "{type(a).__name__}".')
# Register the default algebra.
@_dispatch
def get_algebra(a: Element):
return Element
new_cache = {} #: Cache for `.algebra.new`.
def new(a, t):
"""Create a new specialised type.
Args:
a (:class:`.algebra.Element`): Element to create new type for.
t (type): Type to create.
Returns:
type: Specialisation of `t` appropriate for `a`.
"""
try:
return new_cache[type(a), t]
except KeyError:
algebra = get_algebra(a)
# Determine candidates.
algebra_types = set(get_subclasses(algebra))
element_types = {t} | set(get_subclasses(t))
candidates = list(algebra_types & element_types)
# Reject concrete parametric types.
candidates = [
c
for c in candidates
if not (
hasattr(c, "parametric")
and c.parametric
and hasattr(c, "concrete")
and c.concrete
)
]
# The most specific types are the ones we are looking for.
candidates = filter_most_specific(candidates)
# There should only be a single candidate.
if len(candidates) != 1:
raise RuntimeError(
f'Could not determine "{t.__name__}" for algebra "{algebra.__name__}".'
)
new_cache[type(a), t] = candidates[0]
return new_cache[type(a), t]
def filter_most_specific(types):
"""From a list of types, determine the most specific ones.
Args:
types (list[type]): List of types.
Returns:
list[type]: Most specific types in `types`.
"""
filtered_types = []
while len(types) > 0:
t, types = types[0], types[1:]
# If `t` is a supertype, discard it. Otherwise, keep it.
if not (
any(issubclass(u, t) for u in types)
or any(issubclass(u, t) for u in filtered_types)
):
filtered_types.append(t)
return filtered_types
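# Sketch of the two type helpers above (class reprs abbreviated):
#
#   >>> class A: pass
#   >>> class B(A): pass
#   >>> class C(B): pass
#   >>> get_subclasses(A)                # direct subclasses, then theirs
#   [B, C]
#   >>> filter_most_specific([A, B, C])  # C is the most specific
#   [C]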
|
Algebra2x
|
/Algebra2x-0.1-py3-none-any.whl/algebra2x/algebra.py
|
algebra.py
|
from .. import _dispatch
from ..algebra import proven, new, Element, Zero, One, Wrapped, Join
from ..util import identical
__all__ = ["Scaled", "Product"]
class Scaled(Wrapped):
"""Scaled element.
Args:
e (:class:`.algebra.Element`): Element to scale.
scale (tensor): Scale.
"""
def __init__(self, e, scale):
Wrapped.__init__(self, e)
self.scale = scale
@property
def num_factors(self):
return self[0].num_factors + 1
def render_wrap(self, e, formatter):
return f"{formatter(self.scale)} * {e}"
def factor(self, i):
if i >= self.num_factors:
raise IndexError("Index out of range.")
else:
return self.scale if i == 0 else self[0].factor(i - 1)
@_dispatch
def __eq__(self, other: "Scaled"):
return self[0] == other[0] and identical(self.scale, other.scale)
class Product(Join):
"""Product of elements."""
@property
def num_factors(self):
return self[0].num_factors + self[1].num_factors
def factor(self, i):
if i >= self.num_factors:
raise IndexError("Index out of range.")
if i < self[0].num_factors:
return self[0].factor(i)
else:
return self[1].factor(i - self[0].num_factors)
def render_join(self, e1, e2, formatter):
return f"{e1} * {e2}"
@_dispatch
def __eq__(self, other: "Product"):
way1 = self[0] == other[0] and self[1] == other[1]
way2 = self[0] == other[1] and self[1] == other[0]
return way1 or way2
# Generic multiplication.
@_dispatch
def mul(a: Element, b):
if identical(b, 0):
return new(a, Zero)()
elif identical(b, 1):
return a
else:
return new(a, Scaled)(a, b)
@_dispatch
def mul(a, b: Element):
return mul(b, a)
@_dispatch
def mul(a: Element, b: Element):
return new(a, Product)(a, b)
# Cancel redundant zeros and ones.
@_dispatch(precedence=proven())
def mul(a: Zero, b):
return a
@_dispatch(precedence=proven())
def mul(a, b: Zero):
return b
@_dispatch(precedence=proven())
def mul(a: Zero, b: Zero):
return a
@_dispatch(precedence=proven())
def mul(a: One, b: Element):
return b
@_dispatch(precedence=proven())
def mul(a: Element, b: One):
return a
@_dispatch(precedence=proven())
def mul(a: One, b: One):
return a
# Group factors and terms if possible.
@_dispatch
def mul(a, b: Scaled):
return mul(b.scale * a, b[0])
@_dispatch
def mul(a: Scaled, b):
return mul(a.scale * b, a[0])
@_dispatch
def mul(a: Scaled, b: Element):
return mul(a.scale, mul(a[0], b))
@_dispatch
def mul(a: Element, b: Scaled):
return mul(b.scale, mul(a, b[0]))
@_dispatch
def mul(a: Scaled, b: Scaled):
return new(a, Scaled)(mul(a[0], b[0]), a.scale * b.scale)
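# Sketch of how the rules above fold scales, for a hypothetical Element
# subclass instance `e`:
#
#   3 * (2 * e)   # mul(3, Scaled(e, 2)) -> mul(6, e) -> a Scaled with scale 6
#   0 * e         # -> a Zero element
#   1 * e         # -> e unchanged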
|
Algebra2x
|
/Algebra2x-0.1-py3-none-any.whl/algebra2x/ops/mul.py
|
mul.py
|
from typing import Union
from .diff import DerivativeFunction
from .select import SelectedFunction
from .shift import ShiftedFunction
from .stretch import StretchedFunction
from .tensor import TensorProductFunction
from .transform import InputTransformedFunction
from .. import _dispatch
from ..algebra import proven, new, add, mul
from ..function import (
Function,
OneFunction,
ZeroFunction,
WrappedFunction,
ScaledFunction,
SumFunction,
ProductFunction,
stretch,
shift,
select,
transform,
diff,
)
__all__ = ["ReversedFunction"]
class ReversedFunction(WrappedFunction):
"""Function with arguments reversed.
Args:
e (:class:`.elements.Function`): Function to reverse arguments of.
"""
def render_wrap(self, e, formatter):
return f"Reversed({e})"
@_dispatch
def __eq__(self, other: "ReversedFunction"):
return self[0] == other[0]
# A reversed function will never need parentheses.
@_dispatch(precedence=proven())
def need_parens(el: Function, parent: ReversedFunction):
return False
@_dispatch(precedence=proven())
def need_parens(el: ReversedFunction, parent: Function):
return False
# Implement basic methods for reverse function.
@_dispatch
def reverse(a: Function):
return new(a, ReversedFunction)(a)
@_dispatch
def reverse(a: Union[ZeroFunction, OneFunction]):
return a
# Propagate reversal.
@_dispatch
def reverse(a: SumFunction):
return add(reverse(a[0]), reverse(a[1]))
@_dispatch
def reverse(a: ProductFunction):
return mul(reverse(a[0]), reverse(a[1]))
@_dispatch
def reverse(a: ScaledFunction):
return mul(a.scale, reverse(a[0]))
# Let reversal synergise with wrapped functions.
@_dispatch
def reverse(a: ReversedFunction):
return a[0]
@_dispatch
def reverse(a: StretchedFunction):
return stretch(reverse(a[0]), *reversed(a.stretches))
@_dispatch
def reverse(a: ShiftedFunction):
return shift(reverse(a[0]), *reversed(a.shifts))
@_dispatch
def reverse(a: SelectedFunction):
return select(reverse(a[0]), *reversed(a.dims))
@_dispatch
def reverse(a: InputTransformedFunction):
return transform(reverse(a[0]), *reversed(a.fs))
@_dispatch
def reverse(a: DerivativeFunction):
return diff(reverse(a[0]), *reversed(a.derivs))
@_dispatch
def reverse(a: TensorProductFunction):
return new(a, TensorProductFunction)(*reversed(a.fs))
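# Sketch: reversal is an involution, so reversing twice recovers the
# original element for any Function `f`:
#
#   reverse(reverse(f))   # the ReversedFunction wrapper unwraps back to f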
|
Algebra2x
|
/Algebra2x-0.1-py3-none-any.whl/algebra2x/ops/reverse.py
|
reverse.py
|
from .mul import mul, Scaled
from .. import _dispatch
from ..algebra import proven, new, Element, Zero, One, Join
from ..util import identical
__all__ = ["Sum"]
class Sum(Join):
"""Sum of elements."""
@property
def num_terms(self):
return self[0].num_terms + self[1].num_terms
def term(self, i):
if i >= self.num_terms:
raise IndexError("Index out of range.")
if i < self[0].num_terms:
return self[0].term(i)
else:
return self[1].term(i - self[0].num_terms)
def render_join(self, e1, e2, formatter):
return f"{e1} + {e2}"
@_dispatch
def __eq__(self, other: "Sum"):
way1 = self[0] == other[0] and self[1] == other[1]
way2 = self[0] == other[1] and self[1] == other[0]
return way1 or way2
# Generic addition.
@_dispatch
def add(a: Element, b):
if identical(b, 0):
return a
else:
return add(a, mul(b, new(a, One)()))
@_dispatch
def add(a, b: Element):
if identical(a, 0):
return b
else:
return add(mul(a, new(b, One)()), b)
@_dispatch
def add(a: Element, b: Element):
if a == b:
return mul(2, a)
else:
return new(a, Sum)(a, b)
# Cancel redundant zeros and ones.
@_dispatch(precedence=proven())
def add(a: Zero, b):
if identical(b, 0):
return a
else:
return mul(new(a, One)(), b)
@_dispatch(precedence=proven())
def add(a, b: Zero):
if identical(a, 0):
return b
else:
return mul(a, new(b, One)())
@_dispatch(precedence=proven())
def add(a: Zero, b: Zero):
return a
@_dispatch(precedence=proven())
def add(a: Element, b: Zero):
return a
@_dispatch(precedence=proven())
def add(a: Zero, b: Element):
return b
# Group factors and terms if possible.
@_dispatch
def add(a: Scaled, b: Element):
if a[0] == b:
return mul(a.scale + 1, b)
else:
return new(a, Sum)(a, b)
@_dispatch
def add(a: Element, b: Scaled):
if a == b[0]:
return mul(b.scale + 1, a)
else:
return new(a, Sum)(a, b)
@_dispatch
def add(a: Scaled, b: Scaled):
if a[0] == b[0]:
return mul(a.scale + b.scale, a[0])
else:
return new(a, Sum)(a, b)
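# Sketch of the grouping rules above, for a hypothetical Element instance
# `e` (equal only to itself):
#
#   e + e                # -> mul(2, e), a Scaled with scale 2
#   (2 * e) + (3 * e)    # equal bases, so the scales add: scale 5
#   e + 0                # -> e unchanged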
|
Algebra2x
|
/Algebra2x-0.1-py3-none-any.whl/algebra2x/ops/add.py
|
add.py
|
# Algebraic-Expression-Parser
Parse and manipulate algebraic expressions, and perform common operations such as obtaining the postfix form, the prefix form, and the expression tree.
# Requirements
- binarytree
```
pip install binarytree
```
## Installing
[`pip install Algebraic-Expression-Parser`](https://pypi.org/project/Algebraic-Expression-Parser/)
### Importing
```python
from AlgebraicExpressionParser import Expression
```
```python
expression = Expression(expression = "x+sin(90)^2*y",
operators = {'+', 'sin', '^', '*'},
operators_info = {'+': (2, 1), '*': (2, 2),'^': (2, 3), 'sin': (1, 4)},
operators_associativity = {'+': 'LR', '*': 'LR','^': 'RL', 'sin': 'RL'},
variables = {'x', 'y'})
```
```python
expression.postfix()
```
```text
['x', '90', 'sin', '2', '^', 'y', '*', '+']
```
```python
expression.prefix()
```
```text
['+', 'x', '*', '^', 'sin', '90', '2', 'y']
```
```python
expression.tree()
```
```text
+___________
/ \
x __*
/ \
____^ y
/ \
sin 2
\
90
```
```python
expression.tree().inorder
```
```text
[Node(x), Node(+), Node(sin), Node(90), Node(^), Node(2), Node(*), Node(y)]
```
|
Algebraic-Expression-Parser
|
/Algebraic-Expression-Parser-0.0.4.tar.gz/Algebraic-Expression-Parser-0.0.4/README.md
|
README.md
|
from typing import Any, List, Optional
class Node:
    def __init__(self, value: Any, *, left: Optional["Node"] = None, right: Optional["Node"] = None) -> None:
        self.value = value
        self.left = left
        self.right = right
    @property
    def left(self) -> Optional["Node"]:
        return self._left
    @left.setter
    def left(self, left: Optional["Node"]) -> None:
        if isinstance(left, Node) or left is None:
            self._left = left
        else:
            raise TypeError(
                f"Node children have to be Node instances, got {left!r}.")
    @property
    def right(self) -> Optional["Node"]:
        return self._right
    @right.setter
    def right(self, right: Optional["Node"]) -> None:
        if isinstance(right, Node) or right is None:
            self._right = right
        else:
            raise TypeError(
                f"Node children have to be Node instances, got {right!r}.")
    def __str__(self) -> str:
        return f"Node: (value: {self.value}, left: {self.left}, right: {self.right})"
    def __repr__(self) -> str:
        return f"Node(value={self.value}, left={self.left}, right={self.right})"
    def _preorder(self, root: Optional["Node"], result: List[Any]) -> None:
        if not root:
            return
        result.append(root.value)
        self._preorder(root.left, result)
        self._preorder(root.right, result)
    def _inorder(self, root: Optional["Node"], result: List[Any]) -> None:
        if not root:
            return
        self._inorder(root.left, result)
        result.append(root.value)
        self._inorder(root.right, result)
    def _postorder(self, root: Optional["Node"], result: List[Any]) -> None:
        if not root:
            return
        self._postorder(root.left, result)
        self._postorder(root.right, result)
        result.append(root.value)
    def preorder(self) -> List[Any]:
        # The traversal helpers collect node *values*, not Node objects.
        result = []
        self._preorder(self, result)
        return result
    def inorder(self) -> List[Any]:
        result = []
        self._inorder(self, result)
        return result
    def postorder(self) -> List[Any]:
        result = []
        self._postorder(self, result)
        return result
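# A minimal usage sketch (not part of the original module): build a small tree
# by hand and walk it with the three traversals defined above. Note that the
# traversal methods return node values, not Node objects.
if __name__ == "__main__":
    root = Node("+", left=Node("x"), right=Node("y"))
    print(root.preorder())   # ['+', 'x', 'y']
    print(root.inorder())    # ['x', '+', 'y']
    print(root.postorder())  # ['x', 'y', '+']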
|
Algebraic-Expression-Parser
|
/Algebraic-Expression-Parser-0.0.4.tar.gz/Algebraic-Expression-Parser-0.0.4/AlgebraicExpressionParser/parser/node.py
|
node.py
|
import copy
from typing import List, Optional, Set, Union
class Operator:
"""Operator rules holder"""
prefix = "prefix"
infix = "infix"
postfix = "postfix"
unary = 1
binary = 2
ltr = "LR"
rtl = "RL"
    def __init__(self, *, symbol: str, type: int = binary, precedence: int = 1, associativity: str = ltr, position: str = infix) -> None:
        """
        symbol: represents the operator.
        type: str
        type: represents the operator arity. It accepts two values: unary or binary.
        type: int
        default: binary
        precedence: represents the operator precedence.
        type: int
        default: 1
        associativity: represents the operator associativity. It accepts two values: ltr for left-to-right or rtl for right-to-left.
        type: str
        default: ltr
        position: represents the operator position. It accepts three values: prefix, infix or postfix.
        type: str
        default: infix
        """
self.symbol = symbol
self.type = type
self.precedence = precedence
self.associativity = associativity
self.position = position
@property
def symbol(self) -> str:
return self._symbol
@symbol.setter
def symbol(self, symbol: str) -> None:
if not isinstance(symbol, str):
raise TypeError(
f"Invalid operator symbol. It has to be str.")
self._symbol = symbol
@property
def type(self) -> int:
return self._type
@type.setter
def type(self, type: int) -> None:
if not type in [self.unary, self.binary]:
raise TypeError(
f"Invalid operator type.")
self._type = type
@property
def precedence(self) -> int:
return self._precedence
@precedence.setter
def precedence(self, precedence: int) -> None:
if not isinstance(precedence, int):
raise TypeError(
f"Invalid operator precedence. It has to be int.")
self._precedence = precedence
@property
def associativity(self) -> str:
return self._associativity
@associativity.setter
def associativity(self, associativity: str) -> None:
if not associativity in [self.ltr, self.rtl]:
raise TypeError(
f"Invalid operator associativity.")
self._associativity = associativity
@property
def position(self) -> str:
return self._position
@position.setter
def position(self, position: str) -> None:
if not position in [self.prefix, self.infix, self.postfix]:
raise TypeError(
f"Invalid operator position.")
self._position = position
def __str__(self) -> str:
return f"symbol: {self.symbol}\ntype: {self.type}\nprecedence: {self.precedence}\nassociativity: {self.associativity}\nposition: {self.position}"
def __repr__(self) -> str:
return f"Operator(symbol='{self.symbol}', type={self.type}, precedence={self.precedence}, associativity='{self.associativity}')"
class Operators:
"""Operators holder"""
def __init__(self, operators: Union[List[Operator], Set[Operator]]) -> None:
"""
operators: list of Operator instances that holds operators symbols and rules.
type: list or set
"""
self.operators = operators
@property
def operators(self) -> List[Operator]:
return self._operators
@operators.setter
def operators(self, operators: Union[List[Operator], Set[Operator]]) -> None:
        if not isinstance(operators, (list, set)):
            raise TypeError(
                f"operators has to be a list or set. {operators} is {type(operators)}.")
self._operators = set(copy.copy(operators))
self._validate()
def __str__(self) -> str:
return f"operators: {self.operators}"
def __repr__(self) -> str:
return f"Operators({self.operators})"
def _validate(self) -> bool:
for operator in self.operators:
if not isinstance(operator, Operator):
raise TypeError(
f"operators has to be list of Operator instances. {operator} is {type(operator)}.")
return True
def add_operator(self, operator: Operator) -> None:
if not isinstance(operator, Operator):
raise TypeError(
f"operator has to be Operator instance. {operator} is {type(operator)}.")
self.operators.add(operator)
def get_operators(self) -> Set[Operator]:
"""Return set that contains all operators."""
return {operator for operator in self.operators}
def get_operators_symbol(self) -> Set[str]:
"""Return set that contains all operators symbols."""
return {operator.symbol for operator in self.operators}
def get_binary_operators_symbols(self) -> Set[str]:
"""Return set that contains all binary operators symbols."""
return {operator.symbol for operator in self.operators if operator.type == Operator.binary}
def get_binary_operators(self) -> Set[Operator]:
"""Return set that contains all binary operators."""
return {operator for operator in self.operators if operator.type == Operator.binary}
def get_unary_operators_symbols(self) -> Set[str]:
"""Return set that contains all unary operators symbols."""
return {operator.symbol for operator in self.operators if operator.type == Operator.unary}
def get_unary_operators(self) -> Set[Operator]:
"""Return set that contains all unary operators."""
return {operator for operator in self.operators if operator.type == Operator.unary}
def is_operator(self, c: str) -> bool:
return c in self.get_operators_symbol()
def is_binary_operator(self, c: str) -> bool:
return c in self.get_binary_operators_symbols()
def is_unary_operator(self, c: str) -> bool:
return c in self.get_unary_operators_symbols()
def get_operator_rules(self, c: str) -> Set[Operator]:
"""Return all operator rules. There are Some operator has many rules, like '-', it may be minus or negative."""
return {operator for operator in self.operators if operator.symbol == c}
def does_have_higher_precedence(self, operator1: Operator, operator2: Operator) -> bool:
# if operator1.precedence == operator2.precedence:
# return operator1.associativity == Operator.ltr
# return operator1.precedence > operator2.precedence
return (operator2.associativity == Operator.ltr and operator2.precedence <= operator1.precedence) or (operator2.associativity == Operator.rtl and operator2.precedence < operator1.precedence)
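# A minimal usage sketch (not part of the original module): declare a binary
# '+' and a prefix unary '-' and query the holder.
if __name__ == "__main__":
    ops = Operators([
        Operator(symbol="+", type=Operator.binary, precedence=1),
        Operator(symbol="-", type=Operator.unary, precedence=3, position=Operator.prefix),
    ])
    print(ops.is_binary_operator("+"))  # True
    print(ops.is_unary_operator("-"))   # True
    print(ops.get_operators_symbol())   # {'+', '-'} (set order may vary)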
|
Algebraic-Expression-Parser
|
/Algebraic-Expression-Parser-0.0.4.tar.gz/Algebraic-Expression-Parser-0.0.4/AlgebraicExpressionParser/parser/operators.py
|
operators.py
|
from typing import List, Set, Tuple, Union
from collections import deque
import copy
from AlgebraicExpressionParser.exceptions.exceptions import *
from AlgebraicExpressionParser.parser.operators import Operators, Operator
from AlgebraicExpressionParser.parser.node import Node
escape_charcter = "$"
class ExpressionParser:
"""Algebraic expression parser."""
def __init__(self, operators: Operators, *, special_variables: Union[List[str], Set[str]] = set()) -> None:
"""
operators: represents operators rules.
type: Operators
special_variables: represents variables other than predefined ones(constants and one symbol variables).
type: list or set
default: empty set {}
"""
self.operators = operators
self.special_variables = special_variables
@property
def operators(self) -> Operators:
return self._operators
@operators.setter
def operators(self, operators: Operators) -> None:
if not isinstance(operators, Operators):
raise TypeError(
f"operators has to be an Operators instance. {operators} is {type(operators)}.")
self._operators = operators
@property
    def special_variables(self) -> Set[str]:
return self._special_variables
@special_variables.setter
def special_variables(self, special_variables: Union[List[str], Set[str]]) -> None:
        if not isinstance(special_variables, (set, list)):
            raise TypeError(
                f"special_variables has to be a set or list. {special_variables} is {type(special_variables)}.")
self._special_variables = set(copy.copy(special_variables))
def __str__(self) -> str:
return f"{self.operators}"
def __repr__(self) -> str:
return f"ExpressionParser({self.operators.__repr__()})"
def is_operand(self, c: str) -> bool:
return self._is_constant(c) or self._is_variable(c) or c in self.special_variables
@staticmethod
def _is_variable(c: str) -> bool:
return c.isalpha() and len(c) == 1
@staticmethod
def _is_constant(c: str) -> bool:
        if c is None or len(c.strip()) != len(c) or c[0] == '+' or c[0] == '-':
            return False
try:
float(c)
return True
except ValueError:
return False
@staticmethod
def is_open_bracket(c: str) -> bool:
return c == "(" or c == "[" or c == "{"
@staticmethod
def is_close_bracket(c: str) -> bool:
return c == ")" or c == "]" or c == "}"
def _is_bracket(self, c: str) -> bool:
return self.is_close_bracket(c) or self.is_open_bracket(c)
@staticmethod
def _are_pairs(bracket1: str, bracket2: str) -> bool:
"""Return True if the two brackets has the same type."""
if bracket2 == "}" and bracket1 == "{":
return True
elif bracket2 == ")" and bracket1 == "(":
return True
elif bracket2 == "]" and bracket1 == "[":
return True
return False
def _is_valid_token(self, token: str) -> bool:
return self._is_bracket(token) or self.is_operand(token) or self.operators.is_operator(token) or token.isspace() or token == escape_charcter
def _find_next_matching_token(self, expression: str, start_idx: int) -> Tuple[str, int]:
token = ''
accepted_lexeme = ''
longest_idx = start_idx
while start_idx < len(expression):
token += expression[start_idx]
if self._is_valid_token(token):
accepted_lexeme = token
longest_idx = start_idx
start_idx += 1
return (accepted_lexeme, longest_idx)
def tokenize(self, expression: str) -> List[str]:
"""Split the expression into tokens"""
idx = 0
tokens = []
while idx < len(expression):
token, next_idx = self._find_next_matching_token(expression, idx)
if not token:
raise InvalidExpressionException(
"expression is not valid.")
idx = next_idx + 1
tokens.append(token)
return tokens
def _parse(self, tokens: List[str], tokens_postfix: List[str]) -> None:
"""validates expression tokens and constructs postfix form from given tokens."""
if not tokens:
raise InvalidExpressionException(
"expression is not valid.")
sz = len(tokens)
operators_stack = deque()
is_previous_character_operand = False
i = 0
while i < sz:
if self.is_open_bracket(tokens[i]):
if is_previous_character_operand:
raise InvalidExpressionException(
"expression is not valid.")
open_brackets_count = 0
idx = i
# find its close bracket.
while open_brackets_count != 1 or not self.is_close_bracket(tokens[idx]):
if tokens[idx] == escape_charcter:
idx += 1
elif self.is_open_bracket(tokens[idx]):
open_brackets_count += 1
elif self.is_close_bracket(tokens[idx]):
open_brackets_count -= 1
idx += 1
if idx >= sz:
raise InvalidParenthesesException(
"expression's parenthesis are not balanced.")
if not self._are_pairs(tokens[i], tokens[idx]):
raise InvalidParenthesesException(
"expression's parenthesis are not balanced.")
self._parse(tokens[i + 1: idx], tokens_postfix)
i = idx
is_previous_character_operand = True
elif self.is_close_bracket(tokens[i]):
raise InvalidParenthesesException(
"expression's parenthesis are not balanced.")
elif tokens[i].isspace():
i += 1
continue
elif tokens[i] == escape_charcter:
if is_previous_character_operand:
raise InvalidExpressionException(
"expression is not valid.")
is_previous_character_operand = True
tokens_postfix.append(tokens[i])
i += 1
tokens_postfix.append(tokens[i])
elif self.operators.is_operator(tokens[i]):
unary_rule = binary_rule = None
is_valid = False
for rule in self.operators.get_operator_rules(tokens[i]):
if rule.type == Operator.unary:
unary_rule = rule
if rule.type == Operator.binary:
binary_rule = rule
if unary_rule:
if (unary_rule.position == Operator.postfix and is_previous_character_operand) or (unary_rule.position == Operator.prefix and not is_previous_character_operand):
is_valid = True
binary_rule = None
if binary_rule:
if is_previous_character_operand:
is_valid = True
unary_rule = None
is_previous_character_operand = False
if not is_valid:
raise InvalidExpressionException(
"expression is not valid.")
while operators_stack and self.operators.does_have_higher_precedence(operators_stack[-1][1], unary_rule if unary_rule else binary_rule):
tokens_postfix.append(operators_stack[-1][1])
operators_stack.pop()
operators_stack.append(
(tokens[i], unary_rule if unary_rule else binary_rule))
elif self.is_operand(tokens[i]):
if is_previous_character_operand:
raise InvalidExpressionException(
"expression is not valid.")
is_previous_character_operand = True
tokens_postfix.append(tokens[i])
else:
raise InvalidExpressionException(
"expression is not valid.")
i += 1
if not is_previous_character_operand:
raise InvalidExpressionException(
"expression is not valid.")
while operators_stack:
tokens_postfix.append(operators_stack[-1][1])
operators_stack.pop()
def postfix(self, expression: str, include_operators_rules: bool = False) -> List[str]:
"""Return the postfix form for the expression."""
if not isinstance(expression, str):
raise TypeError(
f"expression has to be str. {expression} is {type(expression)}, not str.")
tokens = self.tokenize(expression)
postfix = []
self._parse(tokens, postfix)
if not include_operators_rules:
postfix = [c.symbol if isinstance(c, Operator) else c for c in postfix]
return postfix
def syntax_tree(self, expression: str) -> Node:
"""Return the expression syntax tree."""
postfix = self.postfix(expression, include_operators_rules=True)
stack = deque()
i = 0
while i < len(postfix):
node = Node(postfix[i])
if isinstance(postfix[i], Operator):
node = Node(postfix[i].symbol)
if postfix[i].type == Operator.unary:
if postfix[i].position == Operator.postfix:
if len(stack) < 1:
raise InvalidExpressionException(
"expression is not valid.")
node.left = stack.pop()
if postfix[i].position == Operator.prefix:
if len(stack) < 1:
raise InvalidExpressionException(
"expression is not valid.")
node.right = stack.pop()
if postfix[i].type == Operator.binary:
if len(stack) < 2:
raise InvalidExpressionException(
"expression is not valid.")
node.right = stack.pop()
node.left = stack.pop()
stack.append(node)
i += 1
return stack.pop()
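# A minimal usage sketch (not part of the original module): parse "x+y*2" with
# two binary infix operators and print its postfix form.
if __name__ == "__main__":
    ops = Operators([
        Operator(symbol="+", precedence=1),
        Operator(symbol="*", precedence=2),
    ])
    parser = ExpressionParser(ops)
    print(parser.postfix("x+y*2"))  # ['x', 'y', '2', '*', '+']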
|
Algebraic-Expression-Parser
|
/Algebraic-Expression-Parser-0.0.4.tar.gz/Algebraic-Expression-Parser-0.0.4/AlgebraicExpressionParser/parser/parser.py
|
parser.py
|
# AlgebraicNumber
[](https://pypi.python.org/pypi/AlgebraicNumber)
[](https://gitlab.com/ydethe/algebraicnumber/pipelines)
[](https://codecov.io/gl/ydethe/algebraicnumber)
A library to manipulate algebraic numbers
## Documentation
To generate the documentation, run:
nox
https://ydethe.gitlab.io/algebraicnumber/docs
## Usage
>>> z = AlgebraicNumber.unity() + AlgebraicNumber.imaginary()
>>> z.poly.printCoeff()
'[2,-2,1]'
>>> p = z*z.conj()
>>> p.poly.printCoeff()
'[-2,1]'
|
AlgebraicNumber
|
/AlgebraicNumber-2.4.4.tar.gz/AlgebraicNumber-2.4.4/README.md
|
README.md
|
# Algmon Digital Brain
## Features
* innovative framework for agents & the metaverse
## Steps for Publishing the AlgmonDigitalBrain Package
1. python -m build
2. (optional) twine upload -r testpypi dist/*
3. twine upload dist/*
# **Citations**
* SmellBrain
* TouchBrain
* VisionBrain
* LanguageBrain
* SpeechBrain
```bibtex
@misc{speechbrain,
title={SpeechBrain: A General-Purpose Speech Toolkit},
author={Mirco Ravanelli and Titouan Parcollet and Peter Plantinga and Aku Rouhe and Samuele Cornell and Loren Lugosch and Cem Subakan and Nauman Dawalatabad and Abdelwahab Heba and Jianyuan Zhong and Ju-Chieh Chou and Sung-Lin Yeh and Szu-Wei Fu and Chien-Feng Liao and Elena Rastorgueva and François Grondin and William Aris and Hwidong Na and Yan Gao and Renato De Mori and Yoshua Bengio},
year={2021},
eprint={2106.04624},
archivePrefix={arXiv},
primaryClass={eess.AS}
}
```
|
AlgmonDigitalBrain
|
/AlgmonDigitalBrain-0.0.4.tar.gz/AlgmonDigitalBrain-0.0.4/README.md
|
README.md
|
import time
def Linear(l, valu):
    found = -1
    for i in range(0, len(l)):
        if (l[i] == valu):
            print(l[0:i], '\x1b[6;30;42m' + str(l[i]) + '\x1b[0m', l[i + 1:len(l)])
            print(l[i], "is equal to", valu)
            found = i
            time.sleep(1)
            break
        else:
            print(l[i], "is not equal to", valu)
            print(l[0:i], '\x1b[6;30;42m' + str(l[i]) + '\x1b[0m', " ", l[i + 1:len(l)])
            print()
            time.sleep(1)
    if (found != -1):
        print("Value Found : ", valu)
    else:
        print("The value is not present in the current list")
def Binary(l, x):
def n(l, x):
l.sort()
print("In a Binary Search the given list is must in sorted order")
low = 0
high = len(l) - 1
mid = 0
r = -1
        while low <= high:
            mid = (high + low) // 2
            print('low index', low)
            print('high index', high)
            print('mid index', mid)
            if l[mid] < x:
                print(l[0:mid], '\x1b[6;30;42m' + str(l[mid]) + '\x1b[0m', " ", l[mid + 1:len(l)])
                print(l[mid], " is less than ", x)
                low = mid + 1
                print("low = mid + 1", low)
                print("\n")
                time.sleep(1)
            elif l[mid] > x:
                print(l[0:mid], '\x1b[6;30;42m' + str(l[mid]) + '\x1b[0m', l[mid + 1:len(l)])
                print(l[mid], " is greater than ", x)
                high = mid - 1
                print("high = mid - 1", high)
                print("\n")
                time.sleep(1)
            else:
                print(l[0:mid], '\x1b[6;30;42m' + str(l[mid]) + '\x1b[0m', l[mid + 1:len(l)])
                print("The mid value is the target value : ", l[mid])
                print("\n")
                time.sleep(1)
                return mid
        return -1
r = n(l, x)
if r != -1:
print("Element is present at index", str(r))
else:
print("Element is not present in array")
def insertionSort(arr):
# Traverse through 1 to len(arr)
for i in range(1, len(arr)):
key = arr[i]
# Move elements of arr[0..i-1], that are
# greater than key, to one position ahead
# of their current position
j = i - 1
while j >= 0 and key < arr[j]:
arr[j + 1] = arr[j]
j -= 1
arr[j + 1] = key
print(arr[i], "less than or equal to ",key, )
print(arr[0:i], '\x1b[6;30;42m' + str(arr[i]) + '\x1b[0m', " ", arr[i + 1:len(l)])
time.sleep(1)
print("Sorted output", arr)
def Selectionsort(A):
for i in range(len(A)):
min_idx = i
for j in range(i + 1, len(A)):
if A[min_idx] > A[j]:
min_idx = j
A[i], A[min_idx] = A[min_idx], A[i]
print(A[i], "less than or equal to ", )
print(A[0:i], '\x1b[6;30;42m' + str(A[i]) + '\x1b[0m', " ", A[i + 1:len(l)])
time.sleep(1)
print("Sorted output", A)
def bubbleSort(arr):
n = len(arr)
for i in range(n - 1):
for j in range(0, n - i - 1):
if arr[j] > arr[j + 1]:
arr[j], arr[j + 1] = arr[j + 1], arr[j]
print(arr[i], "greater than previous value ", )
print(arr[0:i], '\x1b[6;30;42m' + str(arr[i]) + '\x1b[0m', " ", arr[i + 1:len(l)])
time.sleep(1)
print("Sorted output", arr)
def shellSort(arr):
gap = len(arr) // 2
while gap > 0:
i = 0
j = gap
while j < len(arr):
if arr[i] > arr[j]:
arr[i], arr[j] = arr[j], arr[i]
i += 1
j += 1
k = i
while k - gap > -1:
if arr[k - gap] > arr[k]:
arr[k - gap], arr[k] = arr[k], arr[k - gap]
k -= 1
print(arr[i], "greater than previous value ")
print(arr[0:i], '\x1b[6;30;42m' + str(arr[i]) + '\x1b[0m', " ", arr[i + 1:len(l)])
time.sleep(1)
gap //= 2
print("Sorted output",arr)
def __init__():
l = list(map(int, input("Enter the list in space separated value : ").strip().split()))
m=input("Enter your Algo Sort or Search :")
if(m== "Search"):
v = int(input("Enter the Target value to find : "))
n = input("Enter the Search mode Binary or Linear : ")
else:
n=input("Enter Bubble sort ,Insertion sort, Shell sort,Selection sort :")
if (n == 'Binary'):
Binary(l, v)
elif (n == 'Linear'):
Linear(l, v)
elif (n == 'Bubble sort'):
bubbleSort(l)
elif (n == 'Insertion sort'):
insertionSort(l)
elif (n == 'Selection sort'):
Selectionsort(l)
elif (n == 'Shell sort'):
shellSort(l)
    else:
        print("Wrong Input")
if __name__ == "__main__":
    __init__()
|
Algo-Vi
|
/Algo-Vi-1.0.3.tar.gz/Algo-Vi-1.0.3/Search-Vi/__main__.py
|
__main__.py
|
import yfinance as yf
import numpy as np
import pandas as pd
import os
import multiprocessing
from datetime import date, timedelta
import datetime
import json
from ..chart import chart
def moving_average_ema(Job, pid):
try:
category = Job["Method"]
tc = yf.Ticker(Job["Ticker"])
df = tc.history(period=Job["look_back_period"])
dates = df.index.tolist()
ref_len = len(dates)
ref_date = dates[0]
corr_date = ref_date - timedelta(days=5000)
df = tc.history(start=corr_date)
corr_len = len(df.index.tolist())
adj_len = corr_len - ref_len
l = Job["Long_Term_Period"]
s = Job["Short_Term_Period"]
l_label = f"EMA_{l}"
s_label = f"EMA_{s}"
df[l_label] = df.Close.ewm(span=Job["Long_Term_Period"], min_periods=1).mean()
df[s_label] = df.Close.ewm(span=Job["Short_Term_Period"], min_periods=1).mean()
df = df.iloc[adj_len:, :]
# Take Position on the Start Day
        # Calculate NetPL
dates_1 = [dates[0]]
actions = [0]
cash_on_hand = Job["Capital"]
position = 0 # 1 denotes taking a long position
long_positions = []
square_offs = []
summary = {}
net_pl = 0
for i in range(1, len(df)):
curr_long = df.iloc[i][l_label]
curr_short = df.iloc[i][s_label]
prev_long = df.iloc[i - 1][l_label]
prev_short = df.iloc[i - 1][s_label]
if (
(curr_short > curr_long)
and (prev_short < prev_long)
and (position == 0)
):
# Generate Buy Signal
actions.append(1)
shares = int(cash_on_hand / df.iloc[i]["Close"])
investment_value = shares * df.iloc[i]["Close"]
cash_on_hand -= investment_value
date = dates[i]
d = {}
d["Shares"] = shares
d["Date"] = date.isoformat()
d["Investment_Value"] = investment_value
d["Action"] = "Buy"
d["Buy_Price"] = df.iloc[i]["Close"]
long_positions.append(d)
position = 1
            # check if the stop_Loss key exists in Job
            elif "stop_Loss" in Job and position == 1:
                flag = long_positions[-1]["Buy_Price"] * (1 - (Job["stop_Loss"] * 0.01))
                print(flag, long_positions[-1]["Buy_Price"])
                if df.iloc[i]["Close"] < flag:
                    print("Stop Loss Triggered", Job["Ticker"])
                    # Generate Sell Signal
                    actions.append(2)
                    prev_position = long_positions[-1]
                    new_value = df.iloc[i]["Close"] * prev_position["Shares"]
                    cash_on_hand += new_value  # credit the sale proceeds once
                    d = {}
                    d["Shares"] = prev_position["Shares"]
                    d["Date"] = dates[i].isoformat()
                    d["Investment_Value"] = new_value
                    d["Action"] = "Sell"
                    d["Sell_Price"] = df.iloc[i]["Close"]
                    d["Type"] = "Stop Loss"
                    netpl = new_value - prev_position["Investment_Value"]
                    net_pl += netpl
                    d["Net_PL"] = netpl
                    square_offs.append(d)
                    position = 0
                else:
                    # keep the Signal column aligned with df rows
                    actions.append(0)
elif (
(curr_short < curr_long)
and (prev_short > prev_long)
and (position == 1)
):
# Square of the Position
actions.append(-1)
prev_position = long_positions[-1]
new_value = df.iloc[i]["Close"] * prev_position["Shares"]
cash_on_hand += new_value
d = {}
d["Shares"] = prev_position["Shares"]
d["Date"] = dates[i].isoformat()
d["Investment_Value"] = new_value
d["Action"] = "Sell"
d["Sell_Price"] = df.iloc[i]["Close"]
netpl = new_value - prev_position["Investment_Value"]
net_pl += netpl
d["Net_PL"] = netpl
square_offs.append(d)
position = 0
else:
actions.append(0)
df["Signal"] = actions
summary["Cash_on_Hand"] = cash_on_hand
summary["Net_PL"] = net_pl
summary["Buy_Signals"] = long_positions
summary["Sell_Signals"] = square_offs
summary["Job_ID"] = pid
summary["Job_details"] = Job
if len(long_positions) > len(square_offs):
summary["Current_Investment"] = long_positions[-1]
data = {}
tick = Job["Ticker"]
m=chart.generate_and_save_chart(df,Job,pid)
summary["Chart"] = m
data[f"{pid}_{tick}_{category}"] = summary
return data
except (Exception) as e:
print(e)
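# A minimal Job sketch (not part of the original module; the values below are
# illustrative only). These are the keys moving_average_ema reads:
# job = {"Method": "EMA_Crossover", "Ticker": "AAPL", "look_back_period": "1y",
#        "Long_Term_Period": 26, "Short_Term_Period": 12, "Capital": 10000,
#        "stop_Loss": 5}  # "stop_Loss" is optional and expressed in percent
# moving_average_ema(job, pid=1)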
|
AlgoAnalyzer
|
/technicals/ema_crossover.py
|
ema_crossover.py
|
import yfinance as yf
import numpy as np
import pandas as pd
import os
import multiprocessing
from datetime import date, timedelta
import datetime
import json
from ..chart import chart
def macd(Job, pid):
category = Job["Method"]
tc = yf.Ticker(Job["Ticker"])
df = tc.history(period=Job["look_back_period"])
dates = df.index.tolist()
ref_len = len(dates)
ref_date = dates[0]
corr_date = ref_date - timedelta(days=5000)
df = tc.history(start=corr_date)
corr_len = len(df.index.tolist())
adj_len = corr_len - ref_len
    l = 26
    s = 12
    l_label = f"EMA_{l}"
    s_label = f"EMA_{s}"
df["macd"] = (
df.Close.ewm(span=12, min_periods=1).mean()
- df.Close.ewm(span=26, min_periods=1).mean()
)
df["signal"] = df.macd.ewm(span=9, min_periods=1).mean()
df = df.iloc[adj_len:, :]
dates_1 = [dates[0]]
actions = [0]
cash_on_hand = Job["Capital"]
position = 0 # 1 denotes taking a long position
long_positions = []
square_offs = []
summary = {}
net_pl = 0
dates = df.index.tolist()
for i in range(1, len(df)):
curr_signal = df.iloc[i]["signal"]
curr_macd = df.iloc[i]["macd"]
prev_signal = df.iloc[i - 1]["signal"]
prev_macd = df.iloc[i - 1]["macd"]
if (curr_macd > curr_signal) and (prev_signal > prev_macd) and (position == 0):
# Generate Buy Signal
actions.append(1)
shares = int(cash_on_hand / df.iloc[i]["Close"])
investment_value = shares * df.iloc[i]["Close"]
cash_on_hand -= investment_value
date = dates[i]
d = {}
d["Shares"] = shares
d["Date"] = date.isoformat()
d["Investment_Value"] = investment_value
d["Action"] = "Buy"
d["Buy_Price"] = df.iloc[i]["Close"]
long_positions.append(d)
position = 1
elif "stop_Loss" in Job and position == 1:
if df.iloc[i]["Close"] < long_positions[-1]["Buy_Price"] * (
1 - (Job["stop_Loss"] / 100)
):
# Generate Sell Signal
actions.append(2)
shares = long_positions[-1]["Shares"]
investment_value = shares * df.iloc[i]["Close"]
cash_on_hand += investment_value
date = dates[i].isoformat()
d = {}
d["Shares"] = prev_position["Shares"]
d["Date"] = dates[i].isoformat()
d["Investment_Value"] = new_value
d["Action"] = "Sell"
d["Sell_Price"] = df.iloc[i]["Close"]
d["Type"] = "Stop Loss"
netpl = new_value - prev_position["Investment_Value"]
net_pl += netpl
d["Net_PL"] = netpl
square_offs.append(d)
position = 0
elif (curr_macd < curr_signal) and (prev_macd > prev_signal) and (position == 1):
# Square of the Position
actions.append(-1)
prev_position = long_positions[-1]
new_value = df.iloc[i]["Close"] * prev_position["Shares"]
cash_on_hand += new_value
d = {}
d["Shares"] = prev_position["Shares"]
d["Date"] = dates[i].isoformat()
d["Investment_Value"] = new_value
d["Action"] = "Sell"
d["Sell_Price"] = df.iloc[i]["Close"]
netpl = new_value - prev_position["Investment_Value"]
net_pl += netpl
d["Net_PL"] = netpl
square_offs.append(d)
position = 0
else:
actions.append(0)
df["Signal"] = actions
summary["Net_PL"] = net_pl
summary["Buy_Signals"] = long_positions
summary["Sell_Signals"] = square_offs
summary["Job_ID"] = pid
summary["Job_details"] = Job
if len(long_positions) > len(square_offs):
summary["Current_Investment"] = long_positions[-1]
data = {}
tick = Job["Ticker"]
m=chart.generate_macd_chart(df, Job,pid)
summary["Chart"] = m
data[f"{pid}_{tick}_{category}"] = summary
return data
|
AlgoAnalyzer
|
/technicals/macd.py
|
macd.py
|
import yfinance as yf
import numpy as np
import pandas as pd
import os
import multiprocessing
from datetime import date, timedelta
import datetime
import json
from ..chart import chart
import sys
def moving_average_sma(Job, pid):
category = Job["Method"]
tc = yf.Ticker(Job["Ticker"])
df = tc.history(period=Job["look_back_period"])
dates = df.index.tolist()
ref_len = len(dates)
ref_date = dates[0]
corr_date = ref_date - timedelta(days=5000)
df = tc.history(start=corr_date)
corr_len = len(df.index.tolist())
adj_len = corr_len - ref_len
l = Job["Long_Term_Period"]
s = Job["Short_Term_Period"]
l_label = f"SMA_{l}"
s_label = f"SMA_{s}"
df[l_label] = df.Close.rolling(Job["Long_Term_Period"]).mean()
df[s_label] = df.Close.rolling(Job["Short_Term_Period"]).mean()
df = df.iloc[adj_len:, :]
dates_1 = [dates[0]]
actions = [0]
cash_on_hand = Job["Capital"]
position = 0 # 1 denotes taking a long position
long_positions = []
square_offs = []
summary = {}
net_pl = 0
for i in range(1, len(df)):
curr_long = df.iloc[i][l_label]
curr_short = df.iloc[i][s_label]
prev_long = df.iloc[i - 1][l_label]
prev_short = df.iloc[i - 1][s_label]
if (curr_short > curr_long) and (prev_short < prev_long) and (position == 0):
# Generate Buy Signal
actions.append(1)
shares = int(cash_on_hand / df.iloc[i]["Close"])
investment_value = shares * df.iloc[i]["Close"]
cash_on_hand -= investment_value
date = dates[i]
d = {}
d["Shares"] = shares
d["Date"] = date.isoformat()
d["Investment_Value"] = investment_value
d["Action"] = "Buy"
d["Buy_Price"] = df.iloc[i]["Close"]
long_positions.append(d)
position = 1
elif "stop_Loss" in Job and position == 1:
if df.iloc[i]["Close"] < long_positions[-1]["Buy_Price"] * (
1 - (Job["stop_Loss"] / 100)
):
# Generate Sell Signal
actions.append(-1)
shares = long_positions[-1]["Shares"]
investment_value = shares * df.iloc[i]["Close"]
cash_on_hand += investment_value
date = dates[i].isoformat()
d = {}
d["Shares"] = prev_position["Shares"]
d["Date"] = dates[i].isoformat()
d["Investment_Value"] = new_value
d["Action"] = "Sell"
d["Sell_Price"] = df.iloc[i]["Close"]
d["Type"] = "Stop Loss"
netpl = new_value - prev_position["Investment_Value"]
net_pl += netpl
d["Net_PL"] = netpl
square_offs.append(d)
position = 0
elif (curr_short < curr_long) and (prev_short > prev_long) and (position == 1):
# Square of the Position
actions.append(2)
prev_position = long_positions[-1]
new_value = df.iloc[i]["Close"] * prev_position["Shares"]
cash_on_hand += new_value
d = {}
d["Shares"] = prev_position["Shares"]
d["Date"] = dates[i].isoformat()
d["Investment_Value"] = new_value
d["Action"] = "Sell"
d["Sell_Price"] = df.iloc[i]["Close"]
netpl = new_value - prev_position["Investment_Value"]
net_pl += netpl
d["Net_PL"] = netpl
square_offs.append(d)
position = 0
else:
actions.append(0)
df["Signal"] = actions
summary["Net_PL"] = net_pl
summary["Buy_Signals"] = long_positions
summary["Sell_Signals"] = square_offs
summary["Job_ID"] = pid
summary["Job_details"] = Job
if len(long_positions) > len(square_offs):
summary["Current_Investment"] = long_positions[-1]
data = {}
tick = Job["Ticker"]
m=chart.generate_and_save_chart(df, Job,pid)
summary["Chart"] = m
data[f"{pid}_{tick}_{category}"] = summary
return data
|
AlgoAnalyzer
|
/technicals/sma_crossover.py
|
sma_crossover.py
|
This is a basic trading strategy backtesting library.
It can be used to backtest option strategies.
Three files are needed to backtest the option strategies.
One file with the futures data.
Another file with the call option data.
Third file with the put option data.
Ensure the files follow the format below; all files must be in CSV format.
Futures file : Order of Columns : yyyy-mm-dd, date, month, year, expiry_date (of the format dd-(first three characters of month)-yy, example : 26-May-22), open, high, low, close
Call Option File : Order of Columns : datetime year month date expiry_date time strike_price open high low close (example : 2022-01-03 9:15:00 2022 1 3 27-Jan-22 9:15:00 15500 1961.55 2010 1961.55 2004.45)
Put Option File : Order of Columns : datetime year month date expiry_date time strike_price open high low close (example : 2022-04-01 9:15:00 2022 4 1 28-Apr-22 9:15:00 15500 25.1 25.55 19.95 20.1)
Argument list for ironCondorStrategy :
self, putFile, callFile, spotFile, sellPut, sellCall, buyCall, buyPut, entry, exit, lotSize
example of code
from AlgoAshutosh import *
object = ironCondor()
object.ironCondorStrategy("/Users/username/Desktop/putOptionsFile.csv","/Users/username/Desktop/callOptionsFile.csv","/Users/username/Desktop/futuresFile.csv",100,100,200,200,1,10,50)
This means that if the spot price is 15000, the backtest sells a call at strike 15000+100, buys a call at 15000+200, sells a put at 15000-100, and buys a put at 15000-200.
Entry is on the first day of the month and exit is 10 days before the expiry date.
For example, to exit the position on the expiry date, pass 0 for that argument.
50 represents the lot size.
Argument list for shortStraddleStrategy :
self, putFile, callFile, spotFile, entry, exit, lotSize
Argument list for shortStrangleStrategy :
self, putFile, callFile, spotFile, sellPut, sellCall, entry, exit, lotSize
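Example (a sketch; the file paths and numbers are placeholders, and it assumes these
methods are called on the same object as in the ironCondor example above):
object.shortStrangleStrategy("/Users/username/Desktop/putOptionsFile.csv","/Users/username/Desktop/callOptionsFile.csv","/Users/username/Desktop/futuresFile.csv",100,100,1,5,50)
This would sell a put 100 points below the spot price and a call 100 points above it, enter on the first day of the month, exit 5 days before the expiry date, and use a lot size of 50.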
|
AlgoAshutosh
|
/AlgoAshutosh-0.0.1.tar.gz/AlgoAshutosh-0.0.1/README.txt
|
README.txt
|
This library makes derivative analysis easy for the F&O stocks listed on the Indian stock market.
It takes as input the location of the folder where the Bhav Copy files for the cash market and the F&O market are stored.
It then creates, within that same folder, the CSV files for the F&O Bhav Copy analysis.
It shows the cumulative OI and the cumulative OI change, both in absolute terms and as a percentage.
The sample code is as below:
from AlgoFnO import *
list = ['RELIANCE','TCS','INFY']
a = BhavCopyAnalysis()
print(a.doAnalysis("/Users/<username>/Desktop/AlgoFnO Sample Data",list))
In the above code we pass a comma-separated list of stocks and the location of the folder where the files are stored.
|
AlgoFnO
|
/AlgoFnO-0.0.1.tar.gz/AlgoFnO-0.0.1/readme.txt
|
readme.txt
|
# Project Title
Simplifying Regression and Classification Modeling
## Guide
### Installation setup
`pip install AlgoMaster`
### Classification model
1. Initialize the model
`Classifier=AlgoMaster.Classifier(X,Y,test_size=0.2,random_state=20)`
2. Train the model and predict the results in table format
`Classifier.model_training()`
3. Ensemble technique
`Classifier.ensemble_prediction(No. of models)`
4. Single Training
To predict unseen data
`data=[1,2,3,4,5,6,7,8,9]
Classifier.logistic_test(data)
Classifier.KNeighbors_test(data)
Classifier.GaussianNB_test(data)
Classifier.Bagging_test(data)
Classifier.ExtraTrees_test(data)
Classifier.RandomForest_test(data)
Classifier.DecisionTree_test(data)
Classifier.AdaBoost_test(data)
Classifier.GradientBoosting_test(data)
Classifier.XGBoost_test(data)
Classifier.SGD_test(data)
Classifier.SVC_test(data)
Classifier.Ridge_test(data)
Classifier.BernoulliNB_test(data)`
5. Hyperparameter Tuning
To find the best parameters for the model
`Classifier.hyperparameter_tuning()`
6. Single Hyperparameter Tuning
To find the best parameters for the model
`Classifier.logistic_hyperparameter()
Classifier.KNeighbors_hyperparameter()
Classifier.GaussianNB_hyperparameter()
Classifier.Bagging_hyperparameter()
Classifier.ExtraTrees_hyperparameter()
Classifier.RandomForest_hyperparameter()
Classifier.DecisionTree_hyperparameter()
Classifier.AdaBoost_hyperparameter()
Classifier.GradientBoosting_hyperparameter()
Classifier.XGBoost_hyperparameter()
Classifier.SGD_hyperparameter()
Classifier.SVC_hyperparameter()
Classifier.Ridge_hyperparameter()
Classifier.BernoulliNB_hyperparameter()`
### Regression model
1. Initialize the model
`Regressor=AlgoMaster.Regressor(X,Y,test_size=0.2,random_state=20)`
2. Train the model and predict the results in table format
`Regressor.model_training()`
3. Ensemble technique
`Regressor.ensemble_prediction(No. of models)`
4. Single Training
`data=[1,2,3,4,5,6,7,8,9]
Regressor.LinearRegression_test(data)
Regressor.KNeighbors_test(data)
Regressor.Bagging_test(data)
Regressor.ExtraTrees_test(data)
Regressor.RandomForest_test(data)
Regressor.DecisionTree_test(data)
Regressor.AdaBoost_test(data)
Regressor.GradientBoosting_test(data)
Regressor.XGBoost_test(data)
Regressor.TheilSen_test(data)
Regressor.SVR_test(data)
Regressor.Ridge_test(data)
Regressor.RANSAC_test(data)
Regressor.ARD_test(data)
Regressor.BayesianRidge_test(data)
Regressor.HuberRegressor_test(data)
Regressor.Lasso_test(data)
Regressor.ElasticNet_test(data)`
5. Hyperparameter Tuning
To find the best parameters for the model
`Regressor.hyperparameter_tuning()`
6. Single Hyperparameter Tuning
To find the best parameters for the model
`Regressor.KNeighbors_hyperparameter()
Regressor.Bagging_hyperparameter()
Regressor.ExtraTrees_hyperparameter()
Regressor.RandomForest_hyperparameter()
Regressor.DecisionTree_hyperparameter()
Regressor.AdaBoost_hyperparameter()
Regressor.GradientBoosting_hyperparameter()
Regressor.XGBoost_hyperparameter()
Regressor.TheilSen_hyperparameter()
<!-- Regressor.SVR_hyperparameter() -->
Regressor.Ridge_hyperparameter()
Regressor.RANSAC_hyperparameter()
Regressor.ARD_hyperparameter()
Regressor.BayesianRidge_hyperparameter()
Regressor.Lasso_hyperparameter()
Regressor.ElasticNet_hyperparameter()`
|
AlgoMaster
|
/AlgoMaster-0.1.2.tar.gz/AlgoMaster-0.1.2/README.md
|
README.md
|
# AlgoPlus open-source quantitative investment framework
# WeChat official account: AlgoPlus
# Website: http://algo.plus
import os
import csv
from AlgoPlus.CTP.MdApiBase import MdApiBase
from AlgoPlus.CTP.FutureAccount import FutureAccount
from AlgoPlus.ta.time_bar import tick_to_bar
from AlgoPlus.utils.base_field import to_str, to_bytes
from AlgoPlus.CTP.ApiStruct import DepthMarketDataField
class TickEngine(MdApiBase):
def __init__(self, broker_id, md_server, investor_id, password, app_id, auth_code, instrument_id_list, md_queue_list=None,
page_dir='', using_udp=False, multicast=False):
pass
def OnRtnDepthMarketData(self, pDepthMarketData):
        # Put the market data into the shared queues
for md_queue in self.md_queue_list:
md_queue.put(pDepthMarketData)
class BarEngine(MdApiBase):
    def __init__(self, broker_id, md_server, investor_id, password, app_id, auth_code, instrument_id_list, md_queue_list=None,
page_dir='', using_udp=False, multicast=False):
pass
def init_extra(self):
        # Bar fields
bar_cache = {
"UpdateTime": b"99:99:99",
"LastPrice": 0.0,
"HighPrice": 0.0,
"LowPrice": 0.0,
"OpenPrice": 0.0,
"BarVolume": 0,
"BarTurnover": 0.0,
"BarSettlement": 0.0,
"BVolume": 0,
"SVolume": 0,
"FVolume": 0,
"DayVolume": 0,
"DayTurnover": 0.0,
"DaySettlement": 0.0,
"OpenInterest": 0.0,
"LastVolume": 0,
"TradingDay": b"99999999",
}
        self.bar_dict = {}  # Dict container mapping instrument ID to its bar cache
        # Iterate over the subscription list
        for instrument_id in self.instrument_id_list:
            # Convert str to bytes
            if not isinstance(instrument_id, bytes):
                instrument_id = to_bytes(instrument_id.encode('utf-8'))
            # Initialise the bar fields
            self.bar_dict[instrument_id] = bar_cache.copy()
    # /// Depth market data notification
def OnRtnDepthMarketData(self, pDepthMarketData):
        last_update_time = self.bar_dict[pDepthMarketData['InstrumentID']]["UpdateTime"]
        is_new_1minute = (pDepthMarketData['UpdateTime'][:-2] != last_update_time[:-2]) and pDepthMarketData['UpdateTime'] != b'21:00:00'  # 1-minute bar condition
        # is_new_5minute = is_new_1minute and int(pDepthMarketData['UpdateTime'][-4]) % 5 == 0  # 5-minute bar condition
        # is_new_10minute = is_new_1minute and pDepthMarketData['UpdateTime'][-4] == b"0"  # 10-minute bar condition
        # is_new_15minute = is_new_1minute and int(pDepthMarketData['UpdateTime'][-5:-3]) % 15 == 0  # 15-minute bar condition
        # is_new_30minute = is_new_1minute and int(pDepthMarketData['UpdateTime'][-5:-3]) % 30 == 0  # 30-minute bar condition
        # is_new_hour = is_new_1minute and int(pDepthMarketData['UpdateTime'][-5:-3]) % 60 == 0  # 60-minute bar condition
        # A new bar begins
        if is_new_1minute and self.bar_dict[pDepthMarketData['InstrumentID']]["UpdateTime"] != b"99:99:99":
            for md_queue in self.md_queue_list:
                md_queue.put(self.bar_dict[pDepthMarketData['InstrumentID']])
        # Aggregate the tick into the current bar
        tick_to_bar(self.bar_dict[pDepthMarketData['InstrumentID']], pDepthMarketData, is_new_1minute)
class MdRecorder(MdApiBase):
def __init__(self, broker_id, md_server, investor_id, password, app_id, auth_code, instrument_id_list, md_queue_list=None, page_dir='', using_udp=False, multicast=False):
pass
def init_extra(self):
self.csv_file_dict = {}
self.csv_writer = {}
        # List of field names of the depth market data struct
header = list(DepthMarketDataField().to_dict())
for instrument_id in self.instrument_id_list:
instrument_id = to_str(instrument_id)
# file object
file_dir = os.path.join(self.page_dir, f'{instrument_id}-{to_str(self.GetTradingDay())}.csv')
self.csv_file_dict[instrument_id] = open(file_dir, 'a', newline='')
# writer object
self.csv_writer[instrument_id] = csv.DictWriter(self.csv_file_dict[instrument_id], header)
            # Write the header row
self.csv_writer[instrument_id].writeheader()
self.csv_file_dict[instrument_id].flush()
    # /// Depth market data notification
def OnRtnDepthMarketData(self, pDepthMarketData):
try:
for key in pDepthMarketData.keys():
pDepthMarketData[key] = to_str(pDepthMarketData[key])
            # Write the market data row
self.csv_writer[pDepthMarketData['InstrumentID']].writerow(pDepthMarketData)
self.csv_file_dict[pDepthMarketData['InstrumentID']].flush()
except Exception as err_msg:
self.write_log(err_msg, pDepthMarketData)
def run_api(api_cls, account, md_queue_list=None):
if isinstance(account, FutureAccount):
tick_engine = api_cls(
account.broker_id,
account.server_dict['MDServer'],
account.investor_id,
account.password,
account.app_id,
account.auth_code,
account.instrument_id_list,
md_queue_list,
account.md_page_dir
)
tick_engine.Join()
def run_tick_engine(account, md_queue_list):
run_api(TickEngine, account, md_queue_list)
def run_bar_engine(account, md_queue_list):
run_api(BarEngine, account, md_queue_list)
def run_mdrecorder(account):
run_api(MdRecorder, account, None)
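# A minimal usage sketch (not part of the original module; credentials are
# placeholders): record ticks for one instrument on the SimNow TEST server.
# from AlgoPlus.CTP.FutureAccount import get_simnow_account
# account = get_simnow_account('123456', 'password', [b'rb2010'], server_name='TEST')
# run_mdrecorder(account)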
|
AlgoPlus
|
/CTP/MdApi.py
|
MdApi.py
|
# AlgoPlus open-source quantitative investment framework
# WeChat official account: AlgoPlus
# Website: http://algo.plus
import os
BASE_LOCATION = "."
MD_LOCATION = BASE_LOCATION + os.path.sep + "MarketData"
TD_LOCATION = BASE_LOCATION + os.path.sep + "TradingData"
SD_LOCATION = BASE_LOCATION + os.path.sep + "StrategyData"
SIMNOW_SERVER = {
'电信1': {'TDServer': "180.168.146.187:10100", 'MDServer': '180.168.146.187:10110'},
'电信2': {'TDServer': "180.168.146.187:10101", 'MDServer': '180.168.146.187:10111'},
'移动': {'TDServer': "218.202.237.33:10102", 'MDServer': '218.202.237.33:10112'},
'TEST': {'TDServer': "180.168.146.187:10130", 'MDServer': '180.168.146.187:10131'},
}
class FutureAccount:
def __init__(self, broker_id, server_dict, reserve_server_dict, investor_id, password, app_id, auth_code, instrument_id_list, md_page_dir=MD_LOCATION, td_page_dir=TD_LOCATION):
        self.broker_id = broker_id  # Futures broker BrokerID
        self.server_dict = server_dict  # Server addresses used for login
        self.reserve_server_dict = reserve_server_dict  # Backup server addresses
        self.investor_id = investor_id  # Account
        self.password = password  # Password
        self.app_id = app_id  # AppID used for authentication
        self.auth_code = auth_code  # Authorization code used for authentication
        self.instrument_id_list = instrument_id_list  # List of instruments to subscribe to
        self.md_page_dir = md_page_dir  # Directory for MdApi flow files, defaults to MD_LOCATION
        self.td_page_dir = td_page_dir  # Directory for TraderApi flow files, defaults to TD_LOCATION
def get_simnow_account(investor_id, password, instrument_id_list=None, server_name='电信1', md_page_dir=MD_LOCATION, td_page_dir=TD_LOCATION):
    if server_name not in SIMNOW_SERVER.keys():
        print(f'{server_name} is not in the available server list [电信1, 电信2, 移动, TEST]; defaulting to 电信1.')
        server_name = '电信1'
if instrument_id_list is None:
instrument_id_list = []
investor_id = investor_id if isinstance(investor_id, bytes) else investor_id.encode(encoding='utf-8')
password = password if isinstance(password, bytes) else password.encode(encoding='utf-8')
return FutureAccount(
        broker_id='9999',  # Futures broker BrokerID
        server_dict=SIMNOW_SERVER[server_name],  # TDServer is the trading server, MDServer the market data server; addresses use the "ip:port" format
        reserve_server_dict={},
        investor_id=investor_id,  # Account
        password=password,  # Password
        app_id='simnow_client_test',  # AppID used for authentication
        auth_code='0000000000000000',  # Authorization code used for authentication
        instrument_id_list=instrument_id_list,  # List of instruments to subscribe to
        md_page_dir=md_page_dir,  # Directory for MdApi flow files, defaults to MD_LOCATION
        td_page_dir=td_page_dir  # Directory for TraderApi flow files, defaults to TD_LOCATION
)
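# A minimal usage sketch (not part of the original module; the credentials and
# instrument are placeholders):
# account = get_simnow_account('123456', 'password', instrument_id_list=[b'rb2010'], server_name='TEST')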
|
AlgoPlus
|
/CTP/FutureAccount.py
|
FutureAccount.py
|
# WeChat official account: AlgoPlus
# Website: http://algo.plus
# Project home: https://gitee.com/AlgoPlus/
import time
from AlgoPlus.CTP.TraderApiBase import TraderApiBase
from AlgoPlus.CTP.FutureAccount import FutureAccount
class AuthenticateHelper(TraderApiBase):
def __init__(self, broker_id, td_server, investor_id, password, app_id, auth_code, md_queue=None, flow_path='', private_resume_type=2, public_resume_type=2):
pass
def init_extra(self):
"""
初始化策略参数
:return:
"""
# {
# 'ExchangeID': b'', # 交易所
# 'InstrumentID': b'', # 合约代码
# 'UpperLimitPrice': 0.0, # 涨停板
# 'LowerLimitPrice': 0.0, # 跌停板
# 'Volume': 1, # 报单手数
# }
self.parameter_dict = self.md_queue.get(block=False)
# ############################################################################# #
def OnRtnOrder(self, pOrder):
# self.write_log('OnRtnOrder', pOrder)
pass
# ############################################################################# #
def OnRtnTrade(self, pTrade):
# self.write_log('OnRtnTrade', pTrade)
pass
def OnRspQryOrder(self, pOrder, pRspInfo, nRequestID, bIsLast):
if bIsLast:
            self.write_log('OnRspQryOrder', "Query result; output suppressed to keep the log short.")
def OnRspQryTrade(self, pTrade, pRspInfo, nRequestID, bIsLast):
if bIsLast:
            self.write_log('OnRspQryTrade', "Query result; output suppressed to keep the log short.")
def OnRspQryInvestorPosition(self, pInvestorPosition, pRspInfo, nRequestID, bIsLast):
if bIsLast:
            self.write_log('OnRspQryInvestorPosition', "Query result; output suppressed to keep the log short.")
def OnRspQryTradingAccount(self, pTradingAccount, pRspInfo, nRequestID, bIsLast):
if bIsLast:
            self.write_log('OnRspQryTradingAccount', "Query result; output suppressed to keep the log short.")
def Join(self):
while True:
if self.status >= 0 and isinstance(self.parameter_dict, dict):
                # ############################################################################# #
                # Five consecutive rounds of buy-open / sell-close
                ikk = 0
                while ikk < 5:
                    ikk += 1
                    self.buy_open(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['UpperLimitPrice'], self.parameter_dict['Volume'])
                    self.write_log(f"=>{ikk}=> Sent buy-open request at the upper limit price!")
                    time.sleep(3)
                    # Sell-close at the lower limit price
                    self.sell_close(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['LowerLimitPrice'], self.parameter_dict['Volume'], True)
                    self.write_log(f"=>{ikk}=> Sent sell-close request at the lower limit price!")
                # ############################################################################# #
                # Five consecutive rounds of sell-open / buy-close
                ikk = 0
                while ikk < 5:
                    ikk += 1
                    # Sell-open at the lower limit price
                    self.sell_open(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['LowerLimitPrice'], self.parameter_dict['Volume'])
                    self.write_log(f"=>{ikk}=> Sent sell-open request at the lower limit price!")
                    time.sleep(3)
                    # Buy-close at the upper limit price
                    self.buy_close(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['UpperLimitPrice'], self.parameter_dict['Volume'], True)
                    self.write_log(f"=>{ikk}=> Sent buy-close request at the upper limit price!")
                # ############################################################################# #
                # Buy-open (at the lower limit price, so it will not fill), then cancel
                self.buy_open(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['LowerLimitPrice'], self.parameter_dict['Volume'])
                self.write_log("=> Sent buy-open request at the lower limit price!")
                time.sleep(3)
                # Cancel the order
                self.cancel_order(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.order_ref)
                self.write_log("=> Sent cancel request!")
                # ############################################################################# #
                # Sell-open (at the upper limit price, so it will not fill), then cancel
                self.sell_open(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['UpperLimitPrice'], self.parameter_dict['Volume'])
                self.write_log("=> Sent sell-open request at the upper limit price!")
                time.sleep(3)
                # Cancel the order
                self.cancel_order(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.order_ref)
                self.write_log("=> Sent cancel request!")
                # ############################################################################# #
                # Query orders
                self.query_order()
                self.write_log("=> Sent order query request!")
                time.sleep(3)
                # ############################################################################# #
                # Query trades
                self.query_trade()
                self.write_log("=> Sent trade query request!")
                time.sleep(3)
                # ############################################################################# #
                # Query positions
                self.query_position()
                self.write_log("=> Sent position query request!")
                time.sleep(3)
                # ############################################################################# #
                # Query account funds
                self.query_trading_account()
                self.write_log("=> Sent fund query request!")
                time.sleep(3)
                # ############################################################################# #
                print("The see-through supervision authentication simulation trades are complete! You can now contact your futures broker!")
break
time.sleep(1)
def run_authenticate(account, md_queue):
if isinstance(account, FutureAccount):
trader_engine = AuthenticateHelper(
account.broker_id,
account.server_dict['TDServer'],
account.investor_id,
account.password,
account.app_id,
account.auth_code,
md_queue,
            account.td_page_dir
)
trader_engine.Join()
|
AlgoPlus
|
/CTP/AuthenticateHelper.py
|
AuthenticateHelper.py
|
# AlgoPlus量化投资开源框架
# 微信公众号:AlgoPlus
# 官网:http://algo.plus
#///正常
ExchangeProperty_Normal = b'0'
#///根据成交生成报单
ExchangeProperty_GenOrderByTrade = b'1'
#///组织机构代码
IdCardType_EID = b'0'
#///中国公民身份证
IdCardType_IDCard = b'1'
#///军官证
IdCardType_OfficerIDCard = b'2'
#///警官证
IdCardType_PoliceIDCard = b'3'
#///士兵证
IdCardType_SoldierIDCard = b'4'
#///户口簿
IdCardType_HouseholdRegister = b'5'
#///护照
IdCardType_Passport = b'6'
#///台胞证
IdCardType_TaiwanCompatriotIDCard = b'7'
#///回乡证
IdCardType_HomeComingCard = b'8'
#///营业执照号
IdCardType_LicenseNo = b'9'
#///税务登记号/当地纳税ID
IdCardType_TaxNo = b'A'
#///港澳居民来往内地通行证
IdCardType_HMMainlandTravelPermit = b'B'
#///台湾居民来往大陆通行证
IdCardType_TwMainlandTravelPermit = b'C'
#///驾照
IdCardType_DrivingLicense = b'D'
#///当地社保ID
IdCardType_SocialID = b'F'
#///当地身份证
IdCardType_LocalID = b'G'
#///商业登记证
IdCardType_BusinessRegistration = b'H'
#///港澳永久性居民身份证
IdCardType_HKMCIDCard = b'I'
#///人行开户许可证
IdCardType_AccountsPermits = b'J'
#///外国人永久居留证
IdCardType_FrgPrmtRdCard = b'K'
#///资管产品备案函
IdCardType_CptMngPrdLetter = b'L'
#///其他证件
IdCardType_OtherCard = b'x'
#///所有
InvestorRange_All = b'1'
#///投资者组
InvestorRange_Group = b'2'
#///单一投资者
InvestorRange_Single = b'3'
#///所有
DepartmentRange_All = b'1'
#///组织架构
DepartmentRange_Group = b'2'
#///单一投资者
DepartmentRange_Single = b'3'
#///未同步
DataSyncStatus_Asynchronous = b'1'
#///同步中
DataSyncStatus_Synchronizing = b'2'
#///已同步
DataSyncStatus_Synchronized = b'3'
#///已同步
BrokerDataSyncStatus_Synchronized = b'1'
#///同步中
BrokerDataSyncStatus_Synchronizing = b'2'
#///没有任何连接
ExchangeConnectStatus_NoConnection = b'1'
#///已经发出合约查询请求
ExchangeConnectStatus_QryInstrumentSent = b'2'
#///已经获取信息
ExchangeConnectStatus_GotInformation = b'9'
#///没有任何连接
TraderConnectStatus_NotConnected = b'1'
#///已经连接
TraderConnectStatus_Connected = b'2'
#///已经发出合约查询请求
TraderConnectStatus_QryInstrumentSent = b'3'
#///订阅私有流
TraderConnectStatus_SubPrivateFlow = b'4'
#///数据异步化
FunctionCode_DataAsync = b'1'
#///强制用户登出
FunctionCode_ForceUserLogout = b'2'
#///变更管理用户口令
FunctionCode_UserPasswordUpdate = b'3'
#///变更经纪公司口令
FunctionCode_BrokerPasswordUpdate = b'4'
#///变更投资者口令
FunctionCode_InvestorPasswordUpdate = b'5'
#///报单插入
FunctionCode_OrderInsert = b'6'
#///报单操作
FunctionCode_OrderAction = b'7'
#///同步系统数据
FunctionCode_SyncSystemData = b'8'
#///同步经纪公司数据
FunctionCode_SyncBrokerData = b'9'
#///批量同步经纪公司数据
FunctionCode_BachSyncBrokerData = b'A'
#///超级查询
FunctionCode_SuperQuery = b'B'
#///预埋报单插入
FunctionCode_ParkedOrderInsert = b'C'
#///预埋报单操作
FunctionCode_ParkedOrderAction = b'D'
#///同步动态令牌
FunctionCode_SyncOTP = b'E'
#///删除未知单
FunctionCode_DeleteOrder = b'F'
#///强制用户登出
BrokerFunctionCode_ForceUserLogout = b'1'
#///变更用户口令
BrokerFunctionCode_UserPasswordUpdate = b'2'
#///同步经纪公司数据
BrokerFunctionCode_SyncBrokerData = b'3'
#///批量同步经纪公司数据
BrokerFunctionCode_BachSyncBrokerData = b'4'
#///报单插入
BrokerFunctionCode_OrderInsert = b'5'
#///报单操作
BrokerFunctionCode_OrderAction = b'6'
#///全部查询
BrokerFunctionCode_AllQuery = b'7'
#///系统功能:登入/登出/修改密码等
BrokerFunctionCode_log = b'a'
#///基本查询:查询基础数据,如合约,交易所等常量
BrokerFunctionCode_BaseQry = b'b'
#///交易查询:如查成交,委托
BrokerFunctionCode_TradeQry = b'c'
#///交易功能:报单,撤单
BrokerFunctionCode_Trade = b'd'
#///银期转账
BrokerFunctionCode_Virement = b'e'
#///风险监控
BrokerFunctionCode_Risk = b'f'
#///查询/管理:查询会话,踢人等
BrokerFunctionCode_Session = b'g'
#///风控通知控制
BrokerFunctionCode_RiskNoticeCtl = b'h'
#///风控通知发送
BrokerFunctionCode_RiskNotice = b'i'
#///察看经纪公司资金权限
BrokerFunctionCode_BrokerDeposit = b'j'
#///资金查询
BrokerFunctionCode_QueryFund = b'k'
#///报单查询
BrokerFunctionCode_QueryOrder = b'l'
#///成交查询
BrokerFunctionCode_QueryTrade = b'm'
#///持仓查询
BrokerFunctionCode_QueryPosition = b'n'
#///行情查询
BrokerFunctionCode_QueryMarketData = b'o'
#///用户事件查询
BrokerFunctionCode_QueryUserEvent = b'p'
#///风险通知查询
BrokerFunctionCode_QueryRiskNotify = b'q'
#///出入金查询
BrokerFunctionCode_QueryFundChange = b'r'
#///投资者信息查询
BrokerFunctionCode_QueryInvestor = b's'
#///交易编码查询
BrokerFunctionCode_QueryTradingCode = b't'
#///强平
BrokerFunctionCode_ForceClose = b'u'
#///压力测试
BrokerFunctionCode_PressTest = b'v'
#///权益反算
BrokerFunctionCode_RemainCalc = b'w'
#///净持仓保证金指标
BrokerFunctionCode_NetPositionInd = b'x'
#///风险预算
BrokerFunctionCode_RiskPredict = b'y'
#///数据导出
BrokerFunctionCode_DataExport = b'z'
#///风控指标设置
BrokerFunctionCode_RiskTargetSetup = b'A'
#///行情预警
BrokerFunctionCode_MarketDataWarn = b'B'
#///业务通知查询
BrokerFunctionCode_QryBizNotice = b'C'
#///业务通知模板设置
BrokerFunctionCode_CfgBizNotice = b'D'
#///同步动态令牌
BrokerFunctionCode_SyncOTP = b'E'
#///发送业务通知
BrokerFunctionCode_SendBizNotice = b'F'
#///风险级别标准设置
BrokerFunctionCode_CfgRiskLevelStd = b'G'
#///交易终端应急功能
BrokerFunctionCode_TbCommand = b'H'
#///删除未知单
BrokerFunctionCode_DeleteOrder = b'J'
#///预埋报单插入
BrokerFunctionCode_ParkedOrderInsert = b'K'
#///预埋报单操作
BrokerFunctionCode_ParkedOrderAction = b'L'
#///资金不够仍允许行权
BrokerFunctionCode_ExecOrderNoCheck = b'M'
#///指定
BrokerFunctionCode_Designate = b'N'
#///证券处置
BrokerFunctionCode_StockDisposal = b'O'
#///席位资金预警
BrokerFunctionCode_BrokerDepositWarn = b'Q'
#///备兑不足预警
BrokerFunctionCode_CoverWarn = b'S'
#///行权试算
BrokerFunctionCode_PreExecOrder = b'T'
#///行权交收风险
BrokerFunctionCode_ExecOrderRisk = b'P'
#///持仓限额预警
BrokerFunctionCode_PosiLimitWarn = b'U'
#///持仓限额查询
BrokerFunctionCode_QryPosiLimit = b'V'
#///银期签到签退
BrokerFunctionCode_FBSign = b'W'
#///银期签约解约
BrokerFunctionCode_FBAccount = b'X'
#///已经提交
OrderActionStatus_Submitted = b'a'
#///已经接受
OrderActionStatus_Accepted = b'b'
#///已经被拒绝
OrderActionStatus_Rejected = b'c'
#///全部成交
OrderStatus_AllTraded = b'0'
#///部分成交还在队列中
OrderStatus_PartTradedQueueing = b'1'
#///部分成交不在队列中
OrderStatus_PartTradedNotQueueing = b'2'
#///未成交还在队列中
OrderStatus_NoTradeQueueing = b'3'
#///未成交不在队列中
OrderStatus_NoTradeNotQueueing = b'4'
#///撤单
OrderStatus_Canceled = b'5'
#///未知
OrderStatus_Unknown = b'a'
#///尚未触发
OrderStatus_NotTouched = b'b'
#///已触发
OrderStatus_Touched = b'c'
#///已经提交
OrderSubmitStatus_InsertSubmitted = b'0'
#///撤单已经提交
OrderSubmitStatus_CancelSubmitted = b'1'
#///修改已经提交
OrderSubmitStatus_ModifySubmitted = b'2'
#///已经接受
OrderSubmitStatus_Accepted = b'3'
#///报单已经被拒绝
OrderSubmitStatus_InsertRejected = b'4'
#///撤单已经被拒绝
OrderSubmitStatus_CancelRejected = b'5'
#///改单已经被拒绝
OrderSubmitStatus_ModifyRejected = b'6'
#///今日持仓
PositionDate_Today = b'1'
#///历史持仓
PositionDate_History = b'2'
#///使用历史持仓
PositionDateType_UseHistory = b'1'
#///不使用历史持仓
PositionDateType_NoUseHistory = b'2'
#///代理
TradingRole_Broker = b'1'
#///自营
TradingRole_Host = b'2'
#///做市商
TradingRole_Maker = b'3'
#///期货
ProductClass_Futures = b'1'
#///期货期权
ProductClass_Options = b'2'
#///组合
ProductClass_Combination = b'3'
#///即期
ProductClass_Spot = b'4'
#///期转现
ProductClass_EFP = b'5'
#///现货期权
ProductClass_SpotOption = b'6'
#///未上市
InstLifePhase_NotStart = b'0'
#///上市
InstLifePhase_Started = b'1'
#///停牌
InstLifePhase_Pause = b'2'
#///到期
InstLifePhase_Expired = b'3'
#///买
Direction_Buy = b'0'
#///卖
Direction_Sell = b'1'
#///净持仓
PositionType_Net = b'1'
#///综合持仓
PositionType_Gross = b'2'
#///净
PosiDirection_Net = b'1'
#///多头
PosiDirection_Long = b'2'
#///空头
PosiDirection_Short = b'3'
#///不活跃
SysSettlementStatus_NonActive = b'1'
#///启动
SysSettlementStatus_Startup = b'2'
#///操作
SysSettlementStatus_Operating = b'3'
#///结算
SysSettlementStatus_Settlement = b'4'
#///结算完成
SysSettlementStatus_SettlementFinished = b'5'
#///交易费率
RatioAttr_Trade = b'0'
#///结算费率
RatioAttr_Settlement = b'1'
#///投机
HedgeFlag_Speculation = b'1'
#///套利
HedgeFlag_Arbitrage = b'2'
#///套保
HedgeFlag_Hedge = b'3'
#///做市商
HedgeFlag_MarketMaker = b'5'
#///第一腿投机第二腿套保 大商所专用
HedgeFlag_SpecHedge = b'6'
#///第一腿套保第二腿投机 大商所专用
HedgeFlag_HedgeSpec = b'7'
#///投机
BillHedgeFlag_Speculation = b'1'
#///套利
BillHedgeFlag_Arbitrage = b'2'
#///套保
BillHedgeFlag_Hedge = b'3'
#///投机
ClientIDType_Speculation = b'1'
#///套利
ClientIDType_Arbitrage = b'2'
#///套保
ClientIDType_Hedge = b'3'
#///做市商
ClientIDType_MarketMaker = b'5'
#///任意价
OrderPriceType_AnyPrice = b'1'
#///限价
OrderPriceType_LimitPrice = b'2'
#///最优价
OrderPriceType_BestPrice = b'3'
#///最新价
OrderPriceType_LastPrice = b'4'
#///最新价浮动上浮1个ticks
OrderPriceType_LastPricePlusOneTicks = b'5'
#///最新价浮动上浮2个ticks
OrderPriceType_LastPricePlusTwoTicks = b'6'
#///最新价浮动上浮3个ticks
OrderPriceType_LastPricePlusThreeTicks = b'7'
#///卖一价
OrderPriceType_AskPrice1 = b'8'
#///卖一价浮动上浮1个ticks
OrderPriceType_AskPrice1PlusOneTicks = b'9'
#///卖一价浮动上浮2个ticks
OrderPriceType_AskPrice1PlusTwoTicks = b'A'
#///卖一价浮动上浮3个ticks
OrderPriceType_AskPrice1PlusThreeTicks = b'B'
#///买一价
OrderPriceType_BidPrice1 = b'C'
#///买一价浮动上浮1个ticks
OrderPriceType_BidPrice1PlusOneTicks = b'D'
#///买一价浮动上浮2个ticks
OrderPriceType_BidPrice1PlusTwoTicks = b'E'
#///买一价浮动上浮3个ticks
OrderPriceType_BidPrice1PlusThreeTicks = b'F'
#///五档价
OrderPriceType_FiveLevelPrice = b'G'
#///Open
OffsetFlag_Open = b'0'
#///Close
OffsetFlag_Close = b'1'
#///Force close
OffsetFlag_ForceClose = b'2'
#///Close today's position
OffsetFlag_CloseToday = b'3'
#///Close yesterday's position
OffsetFlag_CloseYesterday = b'4'
#///Forced reduction
OffsetFlag_ForceOff = b'5'
#///Local force close
OffsetFlag_LocalForceClose = b'6'
#///非强平
ForceCloseReason_NotForceClose = b'0'
#///资金不足
ForceCloseReason_LackDeposit = b'1'
#///客户超仓
ForceCloseReason_ClientOverPositionLimit = b'2'
#///会员超仓
ForceCloseReason_MemberOverPositionLimit = b'3'
#///持仓非整数倍
ForceCloseReason_NotMultiple = b'4'
#///违规
ForceCloseReason_Violation = b'5'
#///其它
ForceCloseReason_Other = b'6'
#///自然人临近交割
ForceCloseReason_PersonDeliv = b'7'
#///正常
OrderType_Normal = b'0'
#///报价衍生
OrderType_DeriveFromQuote = b'1'
#///组合衍生
OrderType_DeriveFromCombination = b'2'
#///组合报单
OrderType_Combination = b'3'
#///条件单
OrderType_ConditionalOrder = b'4'
#///互换单
OrderType_Swap = b'5'
#///大宗交易成交衍生
OrderType_DeriveFromBlockTrade = b'6'
#///期转现成交衍生
OrderType_DeriveFromEFPTrade = b'7'
#///立即完成,否则撤销
TimeCondition_IOC = b'1'
#///本节有效
TimeCondition_GFS = b'2'
#///当日有效
TimeCondition_GFD = b'3'
#///指定日期前有效
TimeCondition_GTD = b'4'
#///撤销前有效
TimeCondition_GTC = b'5'
#///集合竞价有效
TimeCondition_GFA = b'6'
#///任何数量
VolumeCondition_AV = b'1'
#///最小数量
VolumeCondition_MV = b'2'
#///全部数量
VolumeCondition_CV = b'3'
#///立即
ContingentCondition_Immediately = b'1'
#///止损
ContingentCondition_Touch = b'2'
#///止赢
ContingentCondition_TouchProfit = b'3'
#///预埋单
ContingentCondition_ParkedOrder = b'4'
#///最新价大于条件价
ContingentCondition_LastPriceGreaterThanStopPrice = b'5'
#///最新价大于等于条件价
ContingentCondition_LastPriceGreaterEqualStopPrice = b'6'
#///最新价小于条件价
ContingentCondition_LastPriceLesserThanStopPrice = b'7'
#///最新价小于等于条件价
ContingentCondition_LastPriceLesserEqualStopPrice = b'8'
#///卖一价大于条件价
ContingentCondition_AskPriceGreaterThanStopPrice = b'9'
#///卖一价大于等于条件价
ContingentCondition_AskPriceGreaterEqualStopPrice = b'A'
#///卖一价小于条件价
ContingentCondition_AskPriceLesserThanStopPrice = b'B'
#///卖一价小于等于条件价
ContingentCondition_AskPriceLesserEqualStopPrice = b'C'
#///买一价大于条件价
ContingentCondition_BidPriceGreaterThanStopPrice = b'D'
#///买一价大于等于条件价
ContingentCondition_BidPriceGreaterEqualStopPrice = b'E'
#///买一价小于条件价
ContingentCondition_BidPriceLesserThanStopPrice = b'F'
#///买一价小于等于条件价
ContingentCondition_BidPriceLesserEqualStopPrice = b'H'
#///删除
ActionFlag_Delete = b'0'
#///修改
ActionFlag_Modify = b'3'
#///可以交易
TradingRight_Allow = b'0'
#///只能平仓
TradingRight_CloseOnly = b'1'
#///不能交易
TradingRight_Forbidden = b'2'
#///来自参与者
OrderSource_Participant = b'0'
#///来自管理员
OrderSource_Administrator = b'1'
#///组合持仓拆分为单一持仓,初始化不应包含该类型的持仓
TradeType_SplitCombination = b'#'
#///普通成交
TradeType_Common = b'0'
#///期权执行
TradeType_OptionsExecution = b'1'
#///OTC成交
TradeType_OTC = b'2'
#///期转现衍生成交
TradeType_EFPDerived = b'3'
#///组合衍生成交
TradeType_CombinationDerived = b'4'
#///大宗交易成交
TradeType_BlockTrade = b'5'
#///前成交价
PriceSource_LastPrice = b'0'
#///买委托价
PriceSource_Buy = b'1'
#///卖委托价
PriceSource_Sell = b'2'
#///场外成交价
PriceSource_OTC = b'3'
#///开盘前
InstrumentStatus_BeforeTrading = b'0'
#///非交易
InstrumentStatus_NoTrading = b'1'
#///连续交易
InstrumentStatus_Continous = b'2'
#///集合竞价报单
InstrumentStatus_AuctionOrdering = b'3'
#///集合竞价价格平衡
InstrumentStatus_AuctionBalance = b'4'
#///集合竞价撮合
InstrumentStatus_AuctionMatch = b'5'
#///收盘
InstrumentStatus_Closed = b'6'
#///自动切换
InstStatusEnterReason_Automatic = b'1'
#///手动切换
InstStatusEnterReason_Manual = b'2'
#///熔断
InstStatusEnterReason_Fuse = b'3'
#///未上传
BatchStatus_NoUpload = b'1'
#///已上传
BatchStatus_Uploaded = b'2'
#///审核失败
BatchStatus_Failed = b'3'
#///按所有品种
ReturnStyle_All = b'1'
#///按品种
ReturnStyle_ByProduct = b'2'
#///按成交手数
ReturnPattern_ByVolume = b'1'
#///按留存手续费
ReturnPattern_ByFeeOnHand = b'2'
#///级别1
ReturnLevel_Level1 = b'1'
#///级别2
ReturnLevel_Level2 = b'2'
#///级别3
ReturnLevel_Level3 = b'3'
#///级别4
ReturnLevel_Level4 = b'4'
#///级别5
ReturnLevel_Level5 = b'5'
#///级别6
ReturnLevel_Level6 = b'6'
#///级别7
ReturnLevel_Level7 = b'7'
#///级别8
ReturnLevel_Level8 = b'8'
#///级别9
ReturnLevel_Level9 = b'9'
#///分阶段返还
ReturnStandard_ByPeriod = b'1'
#///按某一标准
ReturnStandard_ByStandard = b'2'
#///质出
MortgageType_Out = b'0'
#///质入
MortgageType_In = b'1'
#///质押比例
InvestorSettlementParamID_MortgageRatio = b'4'
#///保证金算法
InvestorSettlementParamID_MarginWay = b'5'
#///结算单结存是否包含质押
InvestorSettlementParamID_BillDeposit = b'9'
#///质押比例
ExchangeSettlementParamID_MortgageRatio = b'1'
#///分项资金导入项
ExchangeSettlementParamID_OtherFundItem = b'2'
#///分项资金入交易所出入金
ExchangeSettlementParamID_OtherFundImport = b'3'
#///中金所开户最低可用金额
ExchangeSettlementParamID_CFFEXMinPrepa = b'6'
#///郑商所结算方式
ExchangeSettlementParamID_CZCESettlementType = b'7'
#///交易所交割手续费收取方式
ExchangeSettlementParamID_ExchDelivFeeMode = b'9'
#///投资者交割手续费收取方式
ExchangeSettlementParamID_DelivFeeMode = b'0'
#///郑商所组合持仓保证金收取方式
ExchangeSettlementParamID_CZCEComMarginType = b'A'
#///大商所套利保证金是否优惠
ExchangeSettlementParamID_DceComMarginType = b'B'
#///虚值期权保证金优惠比率
ExchangeSettlementParamID_OptOutDisCountRate = b'a'
#///最低保障系数
ExchangeSettlementParamID_OptMiniGuarantee = b'b'
#///投资者代码最小长度
SystemParamID_InvestorIDMinLength = b'1'
#///投资者帐号代码最小长度
SystemParamID_AccountIDMinLength = b'2'
#///投资者开户默认登录权限
SystemParamID_UserRightLogon = b'3'
#///投资者交易结算单成交汇总方式
SystemParamID_SettlementBillTrade = b'4'
#///统一开户更新交易编码方式
SystemParamID_TradingCode = b'5'
#///结算是否判断存在未复核的出入金和分项资金
SystemParamID_CheckFund = b'6'
#///是否启用手续费模板数据权限
SystemParamID_CommModelRight = b'7'
#///是否启用保证金率模板数据权限
SystemParamID_MarginModelRight = b'9'
#///是否规范用户才能激活
SystemParamID_IsStandardActive = b'8'
#///上传的交易所结算文件路径
SystemParamID_UploadSettlementFile = b'U'
#///上报保证金监控中心文件路径
SystemParamID_DownloadCSRCFile = b'D'
#///生成的结算单文件路径
SystemParamID_SettlementBillFile = b'S'
#///证监会文件标识
SystemParamID_CSRCOthersFile = b'C'
#///投资者照片路径
SystemParamID_InvestorPhoto = b'P'
#///全结经纪公司上传文件路径
SystemParamID_CSRCData = b'R'
#///开户密码录入方式
SystemParamID_InvestorPwdModel = b'I'
#///投资者中金所结算文件下载路径
SystemParamID_CFFEXInvestorSettleFile = b'F'
#///投资者代码编码方式
SystemParamID_InvestorIDType = b'a'
#///休眠户最高权益
SystemParamID_FreezeMaxReMain = b'r'
#///手续费相关操作实时上场开关
SystemParamID_IsSync = b'A'
#///解除开仓权限限制
SystemParamID_RelieveOpenLimit = b'O'
#///是否规范用户才能休眠
SystemParamID_IsStandardFreeze = b'X'
#///郑商所是否开放所有品种套保交易
SystemParamID_CZCENormalProductHedge = b'B'
#///系统加密算法
TradeParamID_EncryptionStandard = b'E'
#///系统风险算法
TradeParamID_RiskMode = b'R'
#///系统风险算法是否全局 0-否 1-是
TradeParamID_RiskModeGlobal = b'G'
#///密码加密算法
TradeParamID_modeEncode = b'P'
#///价格小数位数参数
TradeParamID_tickMode = b'T'
#///用户最大会话数
TradeParamID_SingleUserSessionMaxNum = b'S'
#///最大连续登录失败数
TradeParamID_LoginFailMaxNum = b'L'
#///是否强制认证
TradeParamID_IsAuthForce = b'A'
#///是否冻结证券持仓
TradeParamID_IsPosiFreeze = b'F'
#///是否限仓
TradeParamID_IsPosiLimit = b'M'
#///郑商所询价时间间隔
TradeParamID_ForQuoteTimeInterval = b'Q'
#///是否期货限仓
TradeParamID_IsFuturePosiLimit = b'B'
#///是否期货下单频率限制
TradeParamID_IsFutureOrderFreq = b'C'
#///行权冻结是否计算盈利
TradeParamID_IsExecOrderProfit = b'H'
#///银期开户是否验证开户银行卡号是否是预留银行账户
TradeParamID_IsCheckBankAcc = b'I'
#///弱密码最后修改日期
TradeParamID_PasswordDeadLine = b'J'
#///强密码校验
TradeParamID_IsStrongPassword = b'K'
#///自有资金质押比
TradeParamID_BalanceMorgage = b'a'
#///最小密码长度
TradeParamID_MinPwdLen = b'O'
#///IP当日最大登陆失败次数
TradeParamID_LoginFailMaxNumForIP = b'U'
#///密码有效期
TradeParamID_PasswordPeriod = b'V'
#///资金数据
FileID_SettlementFund = b'F'
#///成交数据
FileID_Trade = b'T'
#///投资者持仓数据
FileID_InvestorPosition = b'P'
#///投资者分项资金数据
FileID_SubEntryFund = b'O'
#///组合持仓数据
FileID_CZCECombinationPos = b'C'
#///上报保证金监控中心数据
FileID_CSRCData = b'R'
#///郑商所平仓了结数据
FileID_CZCEClose = b'L'
#///郑商所非平仓了结数据
FileID_CZCENoClose = b'N'
#///持仓明细数据
FileID_PositionDtl = b'D'
#///期权执行文件
FileID_OptionStrike = b'S'
#///结算价比对文件
FileID_SettlementPriceComparison = b'M'
#///上期所非持仓变动明细
FileID_NonTradePosChange = b'B'
#///结算
FileType_Settlement = b'0'
#///核对
FileType_Check = b'1'
#///文本文件(.txt)
FileFormat_Txt = b'0'
#///压缩文件(.zip)
FileFormat_Zip = b'1'
#///DBF文件(.dbf)
FileFormat_DBF = b'2'
#///上传成功
FileUploadStatus_SucceedUpload = b'1'
#///上传失败
FileUploadStatus_FailedUpload = b'2'
#///导入成功
FileUploadStatus_SucceedLoad = b'3'
#///导入部分成功
FileUploadStatus_PartSucceedLoad = b'4'
#///导入失败
FileUploadStatus_FailedLoad = b'5'
#///移出
TransferDirection_Out = b'0'
#///移入
TransferDirection_In = b'1'
#///没有特殊创建规则
SpecialCreateRule_NoSpecialRule = b'0'
#///不包含春节
SpecialCreateRule_NoSpringFestival = b'1'
#///上一合约结算价
BasisPriceType_LastSettlement = b'1'
#///上一合约收盘价
BasisPriceType_LaseClose = b'2'
#///活跃
ProductLifePhase_Active = b'1'
#///不活跃
ProductLifePhase_NonActive = b'2'
#///注销
ProductLifePhase_Canceled = b'3'
#///现金交割
DeliveryMode_CashDeliv = b'1'
#///实物交割
DeliveryMode_CommodityDeliv = b'2'
#///出入金
FundIOType_FundIO = b'1'
#///银期转帐
FundIOType_Transfer = b'2'
#///银期换汇
FundIOType_SwapCurrency = b'3'
#///银行存款
FundType_Deposite = b'1'
#///分项资金
FundType_ItemFund = b'2'
#///公司调整
FundType_Company = b'3'
#///资金内转
FundType_InnerTransfer = b'4'
#///入金
FundDirection_In = b'1'
#///出金
FundDirection_Out = b'2'
#///已录入
FundStatus_Record = b'1'
#///已复核
FundStatus_Check = b'2'
#///已冲销
FundStatus_Charge = b'3'
#///未发布
PublishStatus_None = b'1'
#///正在发布
PublishStatus_Publishing = b'2'
#///已发布
PublishStatus_Published = b'3'
#///不活跃
SystemStatus_NonActive = b'1'
#///启动
SystemStatus_Startup = b'2'
#///交易开始初始化
SystemStatus_Initialize = b'3'
#///交易完成初始化
SystemStatus_Initialized = b'4'
#///收市开始
SystemStatus_Close = b'5'
#///收市完成
SystemStatus_Closed = b'6'
#///结算
SystemStatus_Settlement = b'7'
#///初始
SettlementStatus_Initialize = b'0'
#///结算中
SettlementStatus_Settlementing = b'1'
#///已结算
SettlementStatus_Settlemented = b'2'
#///结算完成
SettlementStatus_Finished = b'3'
#///自然人
InvestorType_Person = b'0'
#///法人
InvestorType_Company = b'1'
#///投资基金
InvestorType_Fund = b'2'
#///特殊法人
InvestorType_SpecialOrgan = b'3'
#///资管户
InvestorType_Asset = b'4'
#///交易会员
BrokerType_Trade = b'0'
#///交易结算会员
BrokerType_TradeSettle = b'1'
#///低风险客户
RiskLevel_Low = b'1'
#///普通客户
RiskLevel_Normal = b'2'
#///关注客户
RiskLevel_Focus = b'3'
#///风险客户
RiskLevel_Risk = b'4'
#///按交易收取
FeeAcceptStyle_ByTrade = b'1'
#///按交割收取
FeeAcceptStyle_ByDeliv = b'2'
#///不收
FeeAcceptStyle_None = b'3'
#///按指定手续费收取
FeeAcceptStyle_FixFee = b'4'
#///交易密码
PasswordType_Trade = b'1'
#///资金密码
PasswordType_Account = b'2'
#///浮盈浮亏都计算
Algorithm_All = b'1'
#///浮盈不计,浮亏计
Algorithm_OnlyLost = b'2'
#///浮盈计,浮亏不计
Algorithm_OnlyGain = b'3'
#///浮盈浮亏都不计算
Algorithm_None = b'4'
#///包含平仓盈利
IncludeCloseProfit_Include = b'0'
#///不包含平仓盈利
IncludeCloseProfit_NotInclude = b'2'
#///无仓无成交不受可提比例限制
AllWithoutTrade_Enable = b'0'
#///受可提比例限制
AllWithoutTrade_Disable = b'2'
#///无仓不受可提比例限制
AllWithoutTrade_NoHoldEnable = b'3'
#///不核对
FuturePwdFlag_UnCheck = b'0'
#///核对
FuturePwdFlag_Check = b'1'
#///银行转期货
TransferType_BankToFuture = b'0'
#///期货转银行
TransferType_FutureToBank = b'1'
#///无效或失败
TransferValidFlag_Invalid = b'0'
#///有效
TransferValidFlag_Valid = b'1'
#///冲正
TransferValidFlag_Reverse = b'2'
#///错单
Reason_CD = b'0'
#///资金在途
Reason_ZT = b'1'
#///其它
Reason_QT = b'2'
#///未知
Sex_None = b'0'
#///男
Sex_Man = b'1'
#///女
Sex_Woman = b'2'
#///投资者
UserType_Investor = b'0'
#///操作员
UserType_Operator = b'1'
#///管理员
UserType_SuperUser = b'2'
#///保证金率
RateType_MarginRate = b'2'
#///交易结算单
NoteType_TradeSettleBill = b'1'
#///交易结算月报
NoteType_TradeSettleMonth = b'2'
#///追加保证金通知书
NoteType_CallMarginNotes = b'3'
#///强行平仓通知书
NoteType_ForceCloseNotes = b'4'
#///成交通知书
NoteType_TradeNotes = b'5'
#///交割通知书
NoteType_DelivNotes = b'6'
#///逐日盯市
SettlementStyle_Day = b'1'
#///逐笔对冲
SettlementStyle_Volume = b'2'
#///日报
SettlementBillType_Day = b'0'
#///月报
SettlementBillType_Month = b'1'
#///登录
UserRightType_Logon = b'1'
#///银期转帐
UserRightType_Transfer = b'2'
#///邮寄结算单
UserRightType_EMail = b'3'
#///传真结算单
UserRightType_Fax = b'4'
#///条件单
UserRightType_ConditionOrder = b'5'
#///昨结算价
MarginPriceType_PreSettlementPrice = b'1'
#///最新价
MarginPriceType_SettlementPrice = b'2'
#///成交均价
MarginPriceType_AveragePrice = b'3'
#///开仓价
MarginPriceType_OpenPrice = b'4'
#///未生成
BillGenStatus_None = b'0'
#///生成中
BillGenStatus_NoGenerated = b'1'
#///已生成
BillGenStatus_Generated = b'2'
#///持仓处理算法
AlgoType_HandlePositionAlgo = b'1'
#///寻找保证金率算法
AlgoType_FindMarginRateAlgo = b'2'
#///基本
HandlePositionAlgoID_Base = b'1'
#///大连商品交易所
HandlePositionAlgoID_DCE = b'2'
#///郑州商品交易所
HandlePositionAlgoID_CZCE = b'3'
#///基本
FindMarginRateAlgoID_Base = b'1'
#///大连商品交易所
FindMarginRateAlgoID_DCE = b'2'
#///郑州商品交易所
FindMarginRateAlgoID_CZCE = b'3'
#///基本
HandleTradingAccountAlgoID_Base = b'1'
#///大连商品交易所
HandleTradingAccountAlgoID_DCE = b'2'
#///郑州商品交易所
HandleTradingAccountAlgoID_CZCE = b'3'
#///指定下单人
PersonType_Order = b'1'
#///开户授权人
PersonType_Open = b'2'
#///资金调拨人
PersonType_Fund = b'3'
#///结算单确认人
PersonType_Settlement = b'4'
#///法人
PersonType_Company = b'5'
#///法人代表
PersonType_Corporation = b'6'
#///投资者联系人
PersonType_LinkMan = b'7'
#///分户管理资产负责人
PersonType_Ledger = b'8'
#///托(保)管人
PersonType_Trustee = b'9'
#///托(保)管机构法人代表
PersonType_TrusteeCorporation = b'A'
#///托(保)管机构开户授权人
PersonType_TrusteeOpen = b'B'
#///托(保)管机构联系人
PersonType_TrusteeContact = b'C'
#///境外自然人参考证件
PersonType_ForeignerRefer = b'D'
#///法人代表参考证件
PersonType_CorporationRefer = b'E'
#///所有
QueryInvestorRange_All = b'1'
#///查询分类
QueryInvestorRange_Group = b'2'
#///单一投资者
QueryInvestorRange_Single = b'3'
#///正常
InvestorRiskStatus_Normal = b'1'
#///警告
InvestorRiskStatus_Warn = b'2'
#///追保
InvestorRiskStatus_Call = b'3'
#///强平
InvestorRiskStatus_Force = b'4'
#///异常
InvestorRiskStatus_Exception = b'5'
#///登录
UserEventType_Login = b'1'
#///登出
UserEventType_Logout = b'2'
#///交易成功
UserEventType_Trading = b'3'
#///交易失败
UserEventType_TradingError = b'4'
#///修改密码
UserEventType_UpdatePassword = b'5'
#///客户端认证
UserEventType_Authenticate = b'6'
#///其他
UserEventType_Other = b'9'
#///先开先平
CloseStyle_Close = b'0'
#///先平今再平昨
CloseStyle_CloseToday = b'1'
#///----
StatMode_Non = b'0'
#///按合约统计
StatMode_Instrument = b'1'
#///按产品统计
StatMode_Product = b'2'
#///按投资者统计
StatMode_Investor = b'3'
#///未发送
ParkedOrderStatus_NotSend = b'1'
#///已发送
ParkedOrderStatus_Send = b'2'
#///已删除
ParkedOrderStatus_Deleted = b'3'
#///正在处理
VirDealStatus_Dealing = b'1'
#///处理成功
VirDealStatus_DeaclSucceed = b'2'
#///综合交易平台
OrgSystemID_Standard = b'0'
#///易盛系统
OrgSystemID_ESunny = b'1'
#///金仕达V6系统
OrgSystemID_KingStarV6 = b'2'
#///正常处理中
VirTradeStatus_NaturalDeal = b'0'
#///成功结束
VirTradeStatus_SucceedEnd = b'1'
#///失败结束
VirTradeStatus_FailedEND = b'2'
#///异常中
VirTradeStatus_Exception = b'3'
#///已人工异常处理
VirTradeStatus_ManualDeal = b'4'
#///通讯异常 ,请人工处理
VirTradeStatus_MesException = b'5'
#///系统出错,请人工处理
VirTradeStatus_SysException = b'6'
#///存折
VirBankAccType_BankBook = b'1'
#///储蓄卡
VirBankAccType_BankCard = b'2'
#///信用卡
VirBankAccType_CreditCard = b'3'
#///正常
VirementStatus_Natural = b'0'
#///销户
VirementStatus_Canceled = b'9'
#///未确认
VirementAvailAbility_NoAvailAbility = b'0'
#///有效
VirementAvailAbility_AvailAbility = b'1'
#///冲正
VirementAvailAbility_Repeal = b'2'
#///银行发起银行资金转期货
VirementTradeCode_BankBankToFuture = b'102001'
#///银行发起期货资金转银行
VirementTradeCode_BankFutureToBank = b'102002'
#///期货发起银行资金转期货
VirementTradeCode_FutureBankToFuture = b'202001'
#///期货发起期货资金转银行
VirementTradeCode_FutureFutureToBank = b'202002'
#///程序生成
AMLGenStatus_Program = b'0'
#///人工生成
AMLGenStatus_HandWork = b'1'
#///主动请求更新
CFMMCKeyKind_REQUEST = b'R'
#///CFMMC自动更新
CFMMCKeyKind_AUTO = b'A'
#///CFMMC手动更新
CFMMCKeyKind_MANUAL = b'M'
#///身份证
CertificationType_IDCard = b'0'
#///护照
CertificationType_Passport = b'1'
#///军官证
CertificationType_OfficerIDCard = b'2'
#///士兵证
CertificationType_SoldierIDCard = b'3'
#///回乡证
CertificationType_HomeComingCard = b'4'
#///户口簿
CertificationType_HouseholdRegister = b'5'
#///营业执照号
CertificationType_LicenseNo = b'6'
#///组织机构代码证
CertificationType_InstitutionCodeCard = b'7'
#///临时营业执照号
CertificationType_TempLicenseNo = b'8'
#///民办非企业登记证书
CertificationType_NoEnterpriseLicenseNo = b'9'
#///其他证件
CertificationType_OtherCard = b'x'
#///主管部门批文
CertificationType_SuperDepAgree = b'a'
#///其他
FileBusinessCode_Others = b'0'
#///转账交易明细对账
FileBusinessCode_TransferDetails = b'1'
#///客户账户状态对账
FileBusinessCode_CustAccStatus = b'2'
#///账户类交易明细对账
FileBusinessCode_AccountTradeDetails = b'3'
#///期货账户信息变更明细对账
FileBusinessCode_FutureAccountChangeInfoDetails = b'4'
#///客户资金台账余额明细对账
FileBusinessCode_CustMoneyDetail = b'5'
#///客户销户结息明细对账
FileBusinessCode_CustCancelAccountInfo = b'6'
#///客户资金余额对账结果
FileBusinessCode_CustMoneyResult = b'7'
#///其它对账异常结果文件
FileBusinessCode_OthersExceptionResult = b'8'
#///客户结息净额明细
FileBusinessCode_CustInterestNetMoneyDetails = b'9'
#///客户资金交收明细
FileBusinessCode_CustMoneySendAndReceiveDetails = b'a'
#///法人存管银行资金交收汇总
FileBusinessCode_CorporationMoneyTotal = b'b'
#///主体间资金交收汇总
FileBusinessCode_MainbodyMoneyTotal = b'c'
#///总分平衡监管数据
FileBusinessCode_MainPartMonitorData = b'd'
#///存管银行备付金余额
FileBusinessCode_PreparationMoney = b'e'
#///协办存管银行资金监管数据
FileBusinessCode_BankMoneyMonitorData = b'f'
#///汇
CashExchangeCode_Exchange = b'1'
#///钞
CashExchangeCode_Cash = b'2'
#///是
YesNoIndicator_Yes = b'0'
#///否
YesNoIndicator_No = b'1'
#///当前余额
BanlanceType_CurrentMoney = b'0'
#///可用余额
BanlanceType_UsableMoney = b'1'
#///可取余额
BanlanceType_FetchableMoney = b'2'
#///冻结余额
BanlanceType_FreezeMoney = b'3'
#///未知状态
Gender_Unknown = b'0'
#///男
Gender_Male = b'1'
#///女
Gender_Female = b'2'
#///由受益方支付费用
FeePayFlag_BEN = b'0'
#///由发送方支付费用
FeePayFlag_OUR = b'1'
#///由发送方支付发起的费用,受益方支付接受的费用
FeePayFlag_SHA = b'2'
#///交换密钥
PassWordKeyType_ExchangeKey = b'0'
#///密码密钥
PassWordKeyType_PassWordKey = b'1'
#///MAC密钥
PassWordKeyType_MACKey = b'2'
#///报文密钥
PassWordKeyType_MessageKey = b'3'
#///查询
FBTPassWordType_Query = b'0'
#///取款
FBTPassWordType_Fetch = b'1'
#///转帐
FBTPassWordType_Transfer = b'2'
#///交易
FBTPassWordType_Trade = b'3'
#///不加密
FBTEncryMode_NoEncry = b'0'
#///DES
FBTEncryMode_DES = b'1'
#///3DES
FBTEncryMode_3DES = b'2'
#///银行无需自动冲正
BankRepealFlag_BankNotNeedRepeal = b'0'
#///银行待自动冲正
BankRepealFlag_BankWaitingRepeal = b'1'
#///银行已自动冲正
BankRepealFlag_BankBeenRepealed = b'2'
#///期商无需自动冲正
BrokerRepealFlag_BrokerNotNeedRepeal = b'0'
#///期商待自动冲正
BrokerRepealFlag_BrokerWaitingRepeal = b'1'
#///期商已自动冲正
BrokerRepealFlag_BrokerBeenRepealed = b'2'
#///银行
InstitutionType_Bank = b'0'
#///期商
InstitutionType_Future = b'1'
#///券商
InstitutionType_Store = b'2'
#///是最后分片
LastFragment_Yes = b'0'
#///不是最后分片
LastFragment_No = b'1'
#///正常
BankAccStatus_Normal = b'0'
#///冻结
BankAccStatus_Freeze = b'1'
#///挂失
BankAccStatus_ReportLoss = b'2'
#///正常
MoneyAccountStatus_Normal = b'0'
#///销户
MoneyAccountStatus_Cancel = b'1'
#///指定存管
ManageStatus_Point = b'0'
#///预指定
ManageStatus_PrePoint = b'1'
#///撤销指定
ManageStatus_CancelPoint = b'2'
#///银期转帐
SystemType_FutureBankTransfer = b'0'
#///银证转帐
SystemType_StockBankTransfer = b'1'
#///第三方存管
SystemType_TheThirdPartStore = b'2'
#///正常处理中
TxnEndFlag_NormalProcessing = b'0'
#///成功结束
TxnEndFlag_Success = b'1'
#///失败结束
TxnEndFlag_Failed = b'2'
#///异常中
TxnEndFlag_Abnormal = b'3'
#///已人工异常处理
TxnEndFlag_ManualProcessedForException = b'4'
#///通讯异常 ,请人工处理
TxnEndFlag_CommuFailedNeedManualProcess = b'5'
#///系统出错,请人工处理
TxnEndFlag_SysErrorNeedManualProcess = b'6'
#///未处理
ProcessStatus_NotProcess = b'0'
#///开始处理
ProcessStatus_StartProcess = b'1'
#///处理完成
ProcessStatus_Finished = b'2'
#///自然人
CustType_Person = b'0'
#///机构户
CustType_Institution = b'1'
#///入金,银行转期货
FBTTransferDirection_FromBankToFuture = b'1'
#///出金,期货转银行
FBTTransferDirection_FromFutureToBank = b'2'
#///开户
OpenOrDestroy_Open = b'1'
#///销户
OpenOrDestroy_Destroy = b'0'
#///未确认
AvailabilityFlag_Invalid = b'0'
#///有效
AvailabilityFlag_Valid = b'1'
#///冲正
AvailabilityFlag_Repeal = b'2'
#///银行代理
OrganType_Bank = b'1'
#///交易前置
OrganType_Future = b'2'
#///银期转帐平台管理
OrganType_PlateForm = b'9'
#///银行总行或期商总部
OrganLevel_HeadQuarters = b'1'
#///银行分中心或期货公司营业部
OrganLevel_Branch = b'2'
#///期商协议
ProtocalID_FutureProtocal = b'0'
#///工行协议
ProtocalID_ICBCProtocal = b'1'
#///农行协议
ProtocalID_ABCProtocal = b'2'
#///中国银行协议
ProtocalID_CBCProtocal = b'3'
#///建行协议
ProtocalID_CCBProtocal = b'4'
#///交行协议
ProtocalID_BOCOMProtocal = b'5'
#///银期转帐平台协议
ProtocalID_FBTPlateFormProtocal = b'X'
#///短连接
ConnectMode_ShortConnect = b'0'
#///长连接
ConnectMode_LongConnect = b'1'
#///异步
SyncMode_ASync = b'0'
#///同步
SyncMode_Sync = b'1'
#///银行存折
BankAccType_BankBook = b'1'
#///储蓄卡
BankAccType_SavingCard = b'2'
#///信用卡
BankAccType_CreditCard = b'3'
#///银行存折
FutureAccType_BankBook = b'1'
#///储蓄卡
FutureAccType_SavingCard = b'2'
#///信用卡
FutureAccType_CreditCard = b'3'
#///启用
OrganStatus_Ready = b'0'
#///签到
OrganStatus_CheckIn = b'1'
#///签退
OrganStatus_CheckOut = b'2'
#///对帐文件到达
OrganStatus_CheckFileArrived = b'3'
#///对帐
OrganStatus_CheckDetail = b'4'
#///日终清理
OrganStatus_DayEndClean = b'5'
#///注销
OrganStatus_Invalid = b'9'
#///按金额扣收
CCBFeeMode_ByAmount = b'1'
#///按月扣收
CCBFeeMode_ByMonth = b'2'
#///客户端
CommApiType_Client = b'1'
#///服务端
CommApiType_Server = b'2'
#///交易系统的UserApi
CommApiType_UserApi = b'3'
#///已经连接
LinkStatus_Connected = b'1'
#///没有连接
LinkStatus_Disconnected = b'2'
#///不核对
PwdFlag_NoCheck = b'0'
#///明文核对
PwdFlag_BlankCheck = b'1'
#///密文核对
PwdFlag_EncryptCheck = b'2'
#///资金帐号
SecuAccType_AccountID = b'1'
#///资金卡号
SecuAccType_CardID = b'2'
#///上海股东帐号
SecuAccType_SHStockholderID = b'3'
#///深圳股东帐号
SecuAccType_SZStockholderID = b'4'
#///正常
TransferStatus_Normal = b'0'
#///被冲正
TransferStatus_Repealed = b'1'
#///期商
SponsorType_Broker = b'0'
#///银行
SponsorType_Bank = b'1'
#///请求
ReqRspType_Request = b'0'
#///响应
ReqRspType_Response = b'1'
#///签到
FBTUserEventType_SignIn = b'0'
#///银行转期货
FBTUserEventType_FromBankToFuture = b'1'
#///期货转银行
FBTUserEventType_FromFutureToBank = b'2'
#///开户
FBTUserEventType_OpenAccount = b'3'
#///销户
FBTUserEventType_CancelAccount = b'4'
#///变更银行账户
FBTUserEventType_ChangeAccount = b'5'
#///冲正银行转期货
FBTUserEventType_RepealFromBankToFuture = b'6'
#///冲正期货转银行
FBTUserEventType_RepealFromFutureToBank = b'7'
#///查询银行账户
FBTUserEventType_QueryBankAccount = b'8'
#///查询期货账户
FBTUserEventType_QueryFutureAccount = b'9'
#///签退
FBTUserEventType_SignOut = b'A'
#///密钥同步
FBTUserEventType_SyncKey = b'B'
#///预约开户
FBTUserEventType_ReserveOpenAccount = b'C'
#///撤销预约开户
FBTUserEventType_CancelReserveOpenAccount = b'D'
#///预约开户确认
FBTUserEventType_ReserveOpenAccountConfirm = b'E'
#///其他
FBTUserEventType_Other = b'Z'
#///插入
DBOperation_Insert = b'0'
#///更新
DBOperation_Update = b'1'
#///删除
DBOperation_Delete = b'2'
#///已同步
SyncFlag_Yes = b'0'
#///未同步
SyncFlag_No = b'1'
#///一次同步
SyncType_OneOffSync = b'0'
#///定时同步
SyncType_TimerSync = b'1'
#///定时完全同步
SyncType_TimerFullSync = b'2'
#///结汇
ExDirection_Settlement = b'0'
#///售汇
ExDirection_Sale = b'1'
#///成功
FBEResultFlag_Success = b'0'
#///账户余额不足
FBEResultFlag_InsufficientBalance = b'1'
#///交易结果未知
FBEResultFlag_UnknownTrading = b'8'
#///失败
FBEResultFlag_Fail = b'x'
#///正常
FBEExchStatus_Normal = b'0'
#///交易重发
FBEExchStatus_ReExchange = b'1'
#///数据包
FBEFileFlag_DataPackage = b'0'
#///文件
FBEFileFlag_File = b'1'
#///未交易
FBEAlreadyTrade_NotTrade = b'0'
#///已交易
FBEAlreadyTrade_Trade = b'1'
#///签到
FBEUserEventType_SignIn = b'0'
#///换汇
FBEUserEventType_Exchange = b'1'
#///换汇重发
FBEUserEventType_ReExchange = b'2'
#///银行账户查询
FBEUserEventType_QueryBankAccount = b'3'
#///换汇明细查询
FBEUserEventType_QueryExchDetial = b'4'
#///换汇汇总查询
FBEUserEventType_QueryExchSummary = b'5'
#///换汇汇率查询
FBEUserEventType_QueryExchRate = b'6'
#///对账文件通知
FBEUserEventType_CheckBankAccount = b'7'
#///签退
FBEUserEventType_SignOut = b'8'
#///其他
FBEUserEventType_Other = b'Z'
#///未处理
FBEReqFlag_UnProcessed = b'0'
#///等待发送
FBEReqFlag_WaitSend = b'1'
#///发送成功
FBEReqFlag_SendSuccess = b'2'
#///发送失败
FBEReqFlag_SendFailed = b'3'
#///等待重发
FBEReqFlag_WaitReSend = b'4'
#///正常
NotifyClass_NOERROR = b'0'
#///警示
NotifyClass_Warn = b'1'
#///追保
NotifyClass_Call = b'2'
#///强平
NotifyClass_Force = b'3'
#///穿仓
NotifyClass_CHUANCANG = b'4'
#///异常
NotifyClass_Exception = b'5'
#///手工强平
ForceCloseType_Manual = b'0'
#///单一投资者辅助强平
ForceCloseType_Single = b'1'
#///批量投资者辅助强平
ForceCloseType_Group = b'2'
#///系统通知
RiskNotifyMethod_System = b'0'
#///短信通知
RiskNotifyMethod_SMS = b'1'
#///邮件通知
RiskNotifyMethod_EMail = b'2'
#///人工通知
RiskNotifyMethod_Manual = b'3'
#///未生成
RiskNotifyStatus_NotGen = b'0'
#///已生成未发送
RiskNotifyStatus_Generated = b'1'
#///发送失败
RiskNotifyStatus_SendError = b'2'
#///已发送未接收
RiskNotifyStatus_SendOk = b'3'
#///已接收未确认
RiskNotifyStatus_Received = b'4'
#///已确认
RiskNotifyStatus_Confirmed = b'5'
#///导出数据
RiskUserEvent_ExportData = b'0'
#///使用最新价升序
ConditionalOrderSortType_LastPriceAsc = b'0'
#///使用最新价降序
ConditionalOrderSortType_LastPriceDesc = b'1'
#///使用卖价升序
ConditionalOrderSortType_AskPriceAsc = b'2'
#///使用卖价降序
ConditionalOrderSortType_AskPriceDesc = b'3'
#///使用买价升序
ConditionalOrderSortType_BidPriceAsc = b'4'
#///使用买价降序
ConditionalOrderSortType_BidPriceDesc = b'5'
#///未发送
SendType_NoSend = b'0'
#///已发送
SendType_Sended = b'1'
#///已生成
SendType_Generated = b'2'
#///报送失败
SendType_SendFail = b'3'
#///接收成功
SendType_Success = b'4'
#///接收失败
SendType_Fail = b'5'
#///取消报送
SendType_Cancel = b'6'
#///未申请
ClientIDStatus_NoApply = b'1'
#///已提交申请
ClientIDStatus_Submited = b'2'
#///已发送申请
ClientIDStatus_Sended = b'3'
#///完成
ClientIDStatus_Success = b'4'
#///拒绝
ClientIDStatus_Refuse = b'5'
#///已撤销编码
ClientIDStatus_Cancel = b'6'
#///单选
QuestionType_Radio = b'1'
#///多选
QuestionType_Option = b'2'
#///填空
QuestionType_Blank = b'3'
#///请求
BusinessType_Request = b'1'
#///应答
BusinessType_Response = b'2'
#///通知
BusinessType_Notice = b'3'
#///成功
CfmmcReturnCode_Success = b'0'
#///该客户已经有流程在处理中
CfmmcReturnCode_Working = b'1'
#///监控中客户资料检查失败
CfmmcReturnCode_InfoFail = b'2'
#///监控中实名制检查失败
CfmmcReturnCode_IDCardFail = b'3'
#///其他错误
CfmmcReturnCode_OtherFail = b'4'
#///所有
ClientType_All = b'0'
#///个人
ClientType_Person = b'1'
#///单位
ClientType_Company = b'2'
#///其他
ClientType_Other = b'3'
#///特殊法人
ClientType_SpecialOrgan = b'4'
#///资管户
ClientType_Asset = b'5'
#///上海期货交易所
ExchangeIDType_SHFE = b'S'
#///郑州商品交易所
ExchangeIDType_CZCE = b'Z'
#///大连商品交易所
ExchangeIDType_DCE = b'D'
#///中国金融期货交易所
ExchangeIDType_CFFEX = b'J'
#///上海国际能源交易中心股份有限公司
ExchangeIDType_INE = b'N'
#///套保
ExClientIDType_Hedge = b'1'
#///套利
ExClientIDType_Arbitrage = b'2'
#///投机
ExClientIDType_Speculation = b'3'
#///未更新
UpdateFlag_NoUpdate = b'0'
#///更新全部信息成功
UpdateFlag_Success = b'1'
#///更新全部信息失败
UpdateFlag_Fail = b'2'
#///更新交易编码成功
UpdateFlag_TCSuccess = b'3'
#///更新交易编码失败
UpdateFlag_TCFail = b'4'
#///已丢弃
UpdateFlag_Cancel = b'5'
#///开户
ApplyOperateID_OpenInvestor = b'1'
#///修改身份信息
ApplyOperateID_ModifyIDCard = b'2'
#///修改一般信息
ApplyOperateID_ModifyNoIDCard = b'3'
#///申请交易编码
ApplyOperateID_ApplyTradingCode = b'4'
#///撤销交易编码
ApplyOperateID_CancelTradingCode = b'5'
#///销户
ApplyOperateID_CancelInvestor = b'6'
#///账户休眠
ApplyOperateID_FreezeAccount = b'8'
#///激活休眠账户
ApplyOperateID_ActiveFreezeAccount = b'9'
#///未补全
ApplyStatusID_NoComplete = b'1'
#///已提交
ApplyStatusID_Submited = b'2'
#///已审核
ApplyStatusID_Checked = b'3'
#///已拒绝
ApplyStatusID_Refused = b'4'
#///已删除
ApplyStatusID_Deleted = b'5'
#///文件发送
SendMethod_ByAPI = b'1'
#///电子发送
SendMethod_ByFile = b'2'
#///增加
EventMode_ADD = b'1'
#///修改
EventMode_UPDATE = b'2'
#///删除
EventMode_DELETE = b'3'
#///复核
EventMode_CHECK = b'4'
#///复制
EventMode_COPY = b'5'
#///注销
EventMode_CANCEL = b'6'
#///冲销
EventMode_Reverse = b'7'
#///自动发送并接收
UOAAutoSend_ASR = b'1'
#///自动发送,不自动接收
UOAAutoSend_ASNR = b'2'
#///不自动发送,自动接收
UOAAutoSend_NSAR = b'3'
#///不自动发送,也不自动接收
UOAAutoSend_NSR = b'4'
#///投资者对应投资者组设置
FlowID_InvestorGroupFlow = b'1'
#///投资者手续费率设置
FlowID_InvestorRate = b'2'
#///投资者手续费率模板关系设置
FlowID_InvestorCommRateModel = b'3'
#///零级复核
CheckLevel_Zero = b'0'
#///一级复核
CheckLevel_One = b'1'
#///二级复核
CheckLevel_Two = b'2'
#///未复核
CheckStatus_Init = b'0'
#///复核中
CheckStatus_Checking = b'1'
#///已复核
CheckStatus_Checked = b'2'
#///拒绝
CheckStatus_Refuse = b'3'
#///作废
CheckStatus_Cancel = b'4'
#///未生效
UsedStatus_Unused = b'0'
#///已生效
UsedStatus_Used = b'1'
#///生效失败
UsedStatus_Fail = b'2'
#///手工录入
BankAcountOrigin_ByAccProperty = b'0'
#///银期转账
BankAcountOrigin_ByFBTransfer = b'1'
#///同日同合约
MonthBillTradeSum_ByInstrument = b'0'
#///同日同合约同价格
MonthBillTradeSum_ByDayInsPrc = b'1'
#///同合约
MonthBillTradeSum_ByDayIns = b'2'
#///银行发起银行转期货
FBTTradeCodeEnum_BankLaunchBankToBroker = b'102001'
#///期货发起银行转期货
FBTTradeCodeEnum_BrokerLaunchBankToBroker = b'202001'
#///银行发起期货转银行
FBTTradeCodeEnum_BankLaunchBrokerToBank = b'102002'
#///期货发起期货转银行
FBTTradeCodeEnum_BrokerLaunchBrokerToBank = b'202002'
#///无动态令牌
OTPType_NONE = b'0'
#///时间令牌
OTPType_TOTP = b'1'
#///未使用
OTPStatus_Unused = b'0'
#///已使用
OTPStatus_Used = b'1'
#///注销
OTPStatus_Disuse = b'2'
#///投资者
BrokerUserType_Investor = b'1'
#///操作员
BrokerUserType_BrokerUser = b'2'
#///商品期货
FutureType_Commodity = b'1'
#///金融期货
FutureType_Financial = b'2'
#///转账限额
FundEventType_Restriction = b'0'
#///当日转账限额
FundEventType_TodayRestriction = b'1'
#///期商流水
FundEventType_Transfer = b'2'
#///资金冻结
FundEventType_Credit = b'3'
#///投资者可提资金比例
FundEventType_InvestorWithdrawAlm = b'4'
#///单个银行帐户转账限额
FundEventType_BankRestriction = b'5'
#///银期签约账户
FundEventType_Accountregister = b'6'
#///交易所出入金
FundEventType_ExchangeFundIO = b'7'
#///投资者出入金
FundEventType_InvestorFundIO = b'8'
#///银期同步
AccountSourceType_FBTransfer = b'0'
#///手工录入
AccountSourceType_ManualEntry = b'1'
#///统一开户(已规范)
CodeSourceType_UnifyAccount = b'0'
#///手工录入(未规范)
CodeSourceType_ManualEntry = b'1'
#///所有
UserRange_All = b'0'
#///单一操作员
UserRange_Single = b'1'
#///按投资者统计
ByGroup_Investor = b'2'
#///按类统计
ByGroup_Group = b'1'
#///按合约统计
TradeSumStatMode_Instrument = b'1'
#///按产品统计
TradeSumStatMode_Product = b'2'
#///按交易所统计
TradeSumStatMode_Exchange = b'3'
#///相对已有规则设置
ExprSetMode_Relative = b'1'
#///典型设置
ExprSetMode_Typical = b'2'
#///公司标准
RateInvestorRange_All = b'1'
#///模板
RateInvestorRange_Model = b'2'
#///单一投资者
RateInvestorRange_Single = b'3'
#///未同步
SyncDataStatus_Initialize = b'0'
#///同步中
SyncDataStatus_Settlementing = b'1'
#///已同步
SyncDataStatus_Settlemented = b'2'
#///来自交易所普通回报
TradeSource_NORMAL = b'0'
#///来自查询
TradeSource_QUERY = b'1'
#///产品统计
FlexStatMode_Product = b'1'
#///交易所统计
FlexStatMode_Exchange = b'2'
#///统计所有
FlexStatMode_All = b'3'
#///属性统计
ByInvestorRange_Property = b'1'
#///统计所有
ByInvestorRange_All = b'2'
#///所有
PropertyInvestorRange_All = b'1'
#///投资者属性
PropertyInvestorRange_Property = b'2'
#///单一投资者
PropertyInvestorRange_Single = b'3'
#///未生成
FileStatus_NoCreate = b'0'
#///已生成
FileStatus_Created = b'1'
#///生成失败
FileStatus_Failed = b'2'
#///下发
FileGenStyle_FileTransmit = b'0'
#///生成
FileGenStyle_FileGen = b'1'
#///增加
SysOperMode_Add = b'1'
#///修改
SysOperMode_Update = b'2'
#///删除
SysOperMode_Delete = b'3'
#///复制
SysOperMode_Copy = b'4'
#///激活
SysOperMode_AcTive = b'5'
#///注销
SysOperMode_CanCel = b'6'
#///重置
SysOperMode_ReSet = b'7'
#///修改操作员密码
SysOperType_UpdatePassword = b'0'
#///操作员组织架构关系
SysOperType_UserDepartment = b'1'
#///角色管理
SysOperType_RoleManager = b'2'
#///角色功能设置
SysOperType_RoleFunction = b'3'
#///基础参数设置
SysOperType_BaseParam = b'4'
#///设置操作员
SysOperType_SetUserID = b'5'
#///用户角色设置
SysOperType_SetUserRole = b'6'
#///用户IP限制
SysOperType_UserIpRestriction = b'7'
#///组织架构管理
SysOperType_DepartmentManager = b'8'
#///组织架构向查询分类复制
SysOperType_DepartmentCopy = b'9'
#///交易编码管理
SysOperType_Tradingcode = b'A'
#///投资者状态维护
SysOperType_InvestorStatus = b'B'
#///投资者权限管理
SysOperType_InvestorAuthority = b'C'
#///属性设置
SysOperType_PropertySet = b'D'
#///重置投资者密码
SysOperType_ReSetInvestorPasswd = b'E'
#///投资者个性信息维护
SysOperType_InvestorPersonalityInfo = b'F'
#///查询当前交易日报送的数据
CSRCDataQueyType_Current = b'0'
#///查询历史报送的代理经纪公司的数据
CSRCDataQueyType_History = b'1'
#///活跃
FreezeStatus_Normal = b'1'
#///休眠
FreezeStatus_Freeze = b'0'
#///已规范
StandardStatus_Standard = b'0'
#///未规范
StandardStatus_NonStandard = b'1'
#///休眠户
RightParamType_Freeze = b'1'
#///激活休眠户
RightParamType_FreezeActive = b'2'
#///开仓权限限制
RightParamType_OpenLimit = b'3'
#///解除开仓权限限制
RightParamType_RelieveOpenLimit = b'4'
#///正常
DataStatus_Normal = b'0'
#///已删除
DataStatus_Deleted = b'1'
#///未复核
AMLCheckStatus_Init = b'0'
#///复核中
AMLCheckStatus_Checking = b'1'
#///已复核
AMLCheckStatus_Checked = b'2'
#///拒绝上报
AMLCheckStatus_RefuseReport = b'3'
#///检查日期
AmlDateType_DrawDay = b'0'
#///发生日期
AmlDateType_TouchDay = b'1'
#///零级审核
AmlCheckLevel_CheckLevel0 = b'0'
#///一级审核
AmlCheckLevel_CheckLevel1 = b'1'
#///二级审核
AmlCheckLevel_CheckLevel2 = b'2'
#///三级审核
AmlCheckLevel_CheckLevel3 = b'3'
#///CSV
ExportFileType_CSV = b'0'
#///Excel
ExportFileType_EXCEL = b'1'
#///DBF
ExportFileType_DBF = b'2'
#///结算前准备
SettleManagerType_Before = b'1'
#///结算
SettleManagerType_Settlement = b'2'
#///结算后核对
SettleManagerType_After = b'3'
#///结算后处理
SettleManagerType_Settlemented = b'4'
#///必要
SettleManagerLevel_Must = b'1'
#///警告
SettleManagerLevel_Alarm = b'2'
#///提示
SettleManagerLevel_Prompt = b'3'
#///不检查
SettleManagerLevel_Ignore = b'4'
#///交易所核对
SettleManagerGroup_Exhcange = b'1'
#///内部核对
SettleManagerGroup_ASP = b'2'
#///上报数据核对
SettleManagerGroup_CSRC = b'3'
#///可重复使用
LimitUseType_Repeatable = b'1'
#///不可重复使用
LimitUseType_Unrepeatable = b'2'
#///本系统
DataResource_Settle = b'1'
#///交易所
DataResource_Exchange = b'2'
#///报送数据
DataResource_CSRC = b'3'
#///交易所保证金率
MarginType_ExchMarginRate = b'0'
#///投资者保证金率
MarginType_InstrMarginRate = b'1'
#///投资者交易保证金率
MarginType_InstrMarginRateTrade = b'2'
#///仅当日生效
ActiveType_Intraday = b'1'
#///长期生效
ActiveType_Long = b'2'
#///交易所保证金率
MarginRateType_Exchange = b'1'
#///投资者保证金率
MarginRateType_Investor = b'2'
#///投资者交易保证金率
MarginRateType_InvestorTrade = b'3'
#///未生成备份数据
BackUpStatus_UnBak = b'0'
#///备份数据生成中
BackUpStatus_BakUp = b'1'
#///已生成备份数据
BackUpStatus_BakUped = b'2'
#///备份数据失败
BackUpStatus_BakFail = b'3'
#///结算初始化未开始
InitSettlement_UnInitialize = b'0'
#///结算初始化中
InitSettlement_Initialize = b'1'
#///结算初始化完成
InitSettlement_Initialized = b'2'
#///未生成报表数据
ReportStatus_NoCreate = b'0'
#///报表数据生成中
ReportStatus_Create = b'1'
#///已生成报表数据
ReportStatus_Created = b'2'
#///生成报表数据失败
ReportStatus_CreateFail = b'3'
#///归档未完成
SaveStatus_UnSaveData = b'0'
#///归档完成
SaveStatus_SaveDatad = b'1'
#///未归档数据
SettArchiveStatus_UnArchived = b'0'
#///数据归档中
SettArchiveStatus_Archiving = b'1'
#///已归档数据
SettArchiveStatus_Archived = b'2'
#///归档数据失败
SettArchiveStatus_ArchiveFail = b'3'
#///未知类型
CTPType_Unkown = b'0'
#///主中心
CTPType_MainCenter = b'1'
#///备中心
CTPType_BackUp = b'2'
#///正常
CloseDealType_Normal = b'0'
#///投机平仓优先
CloseDealType_SpecFirst = b'1'
#///不能使用
MortgageFundUseRange_None = b'0'
#///用于保证金
MortgageFundUseRange_Margin = b'1'
#///用于手续费、盈亏、保证金
MortgageFundUseRange_All = b'2'
#///人民币方案3
MortgageFundUseRange_CNY3 = b'3'
#///郑商所套保产品
SpecProductType_CzceHedge = b'1'
#///货币质押产品
SpecProductType_IneForeignCurrency = b'2'
#///大连短线开平仓产品
SpecProductType_DceOpenClose = b'3'
#///质押
FundMortgageType_Mortgage = b'1'
#///解质
FundMortgageType_Redemption = b'2'
#///基础保证金
AccountSettlementParamID_BaseMargin = b'1'
#///最低权益标准
AccountSettlementParamID_LowestInterest = b'2'
#///货币质入
FundMortDirection_In = b'1'
#///货币质出
FundMortDirection_Out = b'2'
#///盈利
BusinessClass_Profit = b'0'
#///亏损
BusinessClass_Loss = b'1'
#///其他
BusinessClass_Other = b'Z'
#///手工
SwapSourceType_Manual = b'0'
#///自动生成
SwapSourceType_Automatic = b'1'
#///结汇
CurrExDirection_Settlement = b'0'
#///售汇
CurrExDirection_Sale = b'1'
#///已录入
CurrencySwapStatus_Entry = b'1'
#///已审核
CurrencySwapStatus_Approve = b'2'
#///已拒绝
CurrencySwapStatus_Refuse = b'3'
#///已撤销
CurrencySwapStatus_Revoke = b'4'
#///已发送
CurrencySwapStatus_Send = b'5'
#///换汇成功
CurrencySwapStatus_Success = b'6'
#///换汇失败
CurrencySwapStatus_Failure = b'7'
#///未发送
ReqFlag_NoSend = b'0'
#///发送成功
ReqFlag_SendSuccess = b'1'
#///发送失败
ReqFlag_SendFailed = b'2'
#///等待重发
ReqFlag_WaitReSend = b'3'
#///成功
ResFlag_Success = b'0'
#///账户余额不足
ResFlag_InsuffiCient = b'1'
#///交易结果未知
ResFlag_UnKnown = b'8'
#///修改前
ExStatus_Before = b'0'
#///修改后
ExStatus_After = b'1'
#///国内客户
ClientRegion_Domestic = b'1'
#///港澳台客户
ClientRegion_GMT = b'2'
#///国外客户
ClientRegion_Foreign = b'3'
#///没有
HasBoard_No = b'0'
#///有
HasBoard_Yes = b'1'
#///正常
StartMode_Normal = b'1'
#///应急
StartMode_Emerge = b'2'
#///恢复
StartMode_Restore = b'3'
#///全量
TemplateType_Full = b'1'
#///增量
TemplateType_Increment = b'2'
#///备份
TemplateType_BackUp = b'3'
#///交易
LoginMode_Trade = b'0'
#///转账
LoginMode_Transfer = b'1'
#///合约上下市
PromptType_Instrument = b'1'
#///保证金分段生效
PromptType_Margin = b'2'
#///有
HasTrustee_Yes = b'1'
#///没有
HasTrustee_No = b'0'
#///银行
AmType_Bank = b'1'
#///证券公司
AmType_Securities = b'2'
#///基金公司
AmType_Fund = b'3'
#///保险公司
AmType_Insurance = b'4'
#///信托公司
AmType_Trust = b'5'
#///其他
AmType_Other = b'9'
#///出入金
CSRCFundIOType_FundIO = b'0'
#///银期换汇
CSRCFundIOType_SwapCurrency = b'1'
#///期货结算账户
CusAccountType_Futures = b'1'
#///纯期货资管业务下的资管结算账户
CusAccountType_AssetmgrFuture = b'2'
#///综合类资管业务下的期货资管托管账户
CusAccountType_AssetmgrTrustee = b'3'
#///综合类资管业务下的资金中转账户
CusAccountType_AssetmgrTransfer = b'4'
#///中文
LanguageType_Chinese = b'1'
#///英文
LanguageType_English = b'2'
#///个人资管客户
AssetmgrClientType_Person = b'1'
#///单位资管客户
AssetmgrClientType_Organ = b'2'
#///特殊单位资管客户
AssetmgrClientType_SpecialOrgan = b'4'
#///期货类
AssetmgrType_Futures = b'3'
#///综合类
AssetmgrType_SpecialOrgan = b'4'
#///合约交易所不存在
CheckInstrType_HasExch = b'0'
#///合约本系统不存在
CheckInstrType_HasATP = b'1'
#///合约比较不一致
CheckInstrType_HasDiff = b'2'
#///手工交割
DeliveryType_HandDeliv = b'1'
#///到期交割
DeliveryType_PersonDeliv = b'2'
#///不使用大额单边保证金算法
MaxMarginSideAlgorithm_NO = b'0'
#///使用大额单边保证金算法
MaxMarginSideAlgorithm_YES = b'1'
#///自然人
DAClientType_Person = b'0'
#///法人
DAClientType_Company = b'1'
#///其他
DAClientType_Other = b'2'
#///期货类
UOAAssetmgrType_Futures = b'1'
#///综合类
UOAAssetmgrType_SpecialOrgan = b'2'
#///Buy
DirectionEn_Buy = b'0'
#///Sell
DirectionEn_Sell = b'1'
#///Position Opening
OffsetFlagEn_Open = b'0'
#///Position Close
OffsetFlagEn_Close = b'1'
#///Forced Liquidation
OffsetFlagEn_ForceClose = b'2'
#///Close Today
OffsetFlagEn_CloseToday = b'3'
#///Close Prev.
OffsetFlagEn_CloseYesterday = b'4'
#///Forced Reduction
OffsetFlagEn_ForceOff = b'5'
#///Local Forced Liquidation
OffsetFlagEn_LocalForceClose = b'6'
#///Speculation
HedgeFlagEn_Speculation = b'1'
#///Arbitrage
HedgeFlagEn_Arbitrage = b'2'
#///Hedge
HedgeFlagEn_Hedge = b'3'
#///Deposit/Withdrawal
FundIOTypeEn_FundIO = b'1'
#///Bank-Futures Transfer
FundIOTypeEn_Transfer = b'2'
#///Bank-Futures FX Exchange
FundIOTypeEn_SwapCurrency = b'3'
#///Bank Deposit
FundTypeEn_Deposite = b'1'
#///Payment/Fee
FundTypeEn_ItemFund = b'2'
#///Brokerage Adj
FundTypeEn_Company = b'3'
#///Internal Transfer
FundTypeEn_InnerTransfer = b'4'
#///Deposit
FundDirectionEn_In = b'1'
#///Withdrawal
FundDirectionEn_Out = b'2'
#///Pledge
FundMortDirectionEn_In = b'1'
#///Redemption
FundMortDirectionEn_Out = b'2'
#///看涨
OptionsType_CallOptions = b'1'
#///看跌
OptionsType_PutOptions = b'2'
#///欧式
StrikeMode_Continental = b'0'
#///美式
StrikeMode_American = b'1'
#///百慕大
StrikeMode_Bermuda = b'2'
#///自身对冲
StrikeType_Hedge = b'0'
#///匹配执行
StrikeType_Match = b'1'
#///不执行数量
ApplyType_NotStrikeNum = b'4'
#///系统生成
GiveUpDataSource_Gen = b'0'
#///手工添加
GiveUpDataSource_Hand = b'1'
#///没有执行
ExecResult_NoExec = b'n'
#///已经取消
ExecResult_Canceled = b'c'
#///执行成功
ExecResult_OK = b'0'
#///期权持仓不够
ExecResult_NoPosition = b'1'
#///资金不够
ExecResult_NoDeposit = b'2'
#///会员不存在
ExecResult_NoParticipant = b'3'
#///客户不存在
ExecResult_NoClient = b'4'
#///合约不存在
ExecResult_NoInstrument = b'6'
#///没有执行权限
ExecResult_NoRight = b'7'
#///不合理的数量
ExecResult_InvalidVolume = b'8'
#///没有足够的历史成交
ExecResult_NoEnoughHistoryTrade = b'9'
#///未知
ExecResult_Unknown = b'a'
#///期货组合
CombinationType_Future = b'0'
#///垂直价差BUL
CombinationType_BUL = b'1'
#///垂直价差BER
CombinationType_BER = b'2'
#///跨式组合
CombinationType_STD = b'3'
#///宽跨式组合
CombinationType_STG = b'4'
#///备兑组合
CombinationType_PRT = b'5'
#///时间价差组合
CombinationType_CLD = b'6'
#///期货对锁组合
DceCombinationType_SPL = b'0'
#///期权对锁组合
DceCombinationType_OPL = b'1'
#///期货跨期组合
DceCombinationType_SP = b'2'
#///期货跨品种组合
DceCombinationType_SPC = b'3'
#///买入期权垂直价差组合
DceCombinationType_BLS = b'4'
#///卖出期权垂直价差组合
DceCombinationType_BES = b'5'
#///期权日历价差组合
DceCombinationType_CAS = b'6'
#///期权跨式组合
DceCombinationType_STD = b'7'
#///期权宽跨式组合
DceCombinationType_STG = b'8'
#///买入期货期权组合
DceCombinationType_BFO = b'9'
#///卖出期货期权组合
DceCombinationType_SFO = b'a'
#///昨结算价
OptionRoyaltyPriceType_PreSettlementPrice = b'1'
#///开仓价
OptionRoyaltyPriceType_OpenPrice = b'4'
#///最新价与昨结算价较大值
OptionRoyaltyPriceType_MaxPreSettlementPrice = b'5'
#///不计算期权市值盈亏
BalanceAlgorithm_Default = b'1'
#///计算期权市值亏损
BalanceAlgorithm_IncludeOptValLost = b'2'
#///执行
ActionType_Exec = b'1'
#///放弃
ActionType_Abandon = b'2'
#///已经提交
ForQuoteStatus_Submitted = b'a'
#///已经接受
ForQuoteStatus_Accepted = b'b'
#///已经被拒绝
ForQuoteStatus_Rejected = b'c'
#///按绝对值
ValueMethod_Absolute = b'0'
#///按比率
ValueMethod_Ratio = b'1'
#///保留
ExecOrderPositionFlag_Reserve = b'0'
#///不保留
ExecOrderPositionFlag_UnReserve = b'1'
#///自动平仓
ExecOrderCloseFlag_AutoClose = b'0'
#///免于自动平仓
ExecOrderCloseFlag_NotToClose = b'1'
#///期货
ProductType_Futures = b'1'
#///期权
ProductType_Options = b'2'
#///^\d{8}_zz_\d{4}
CZCEUploadFileName_CUFN_O = b'O'
#///^\d{8}成交表
CZCEUploadFileName_CUFN_T = b'T'
#///^\d{8}单腿持仓表new
CZCEUploadFileName_CUFN_P = b'P'
#///^\d{8}非平仓了结表
CZCEUploadFileName_CUFN_N = b'N'
#///^\d{8}平仓表
CZCEUploadFileName_CUFN_L = b'L'
#///^\d{8}资金表
CZCEUploadFileName_CUFN_F = b'F'
#///^\d{8}组合持仓表
CZCEUploadFileName_CUFN_C = b'C'
#///^\d{8}保证金参数表
CZCEUploadFileName_CUFN_M = b'M'
#///^\d{8}_dl_\d{3}
DCEUploadFileName_DUFN_O = b'O'
#///^\d{8}_成交表
DCEUploadFileName_DUFN_T = b'T'
#///^\d{8}_持仓表
DCEUploadFileName_DUFN_P = b'P'
#///^\d{8}_资金结算表
DCEUploadFileName_DUFN_F = b'F'
#///^\d{8}_优惠组合持仓明细表
DCEUploadFileName_DUFN_C = b'C'
#///^\d{8}_持仓明细表
DCEUploadFileName_DUFN_D = b'D'
#///^\d{8}_保证金参数表
DCEUploadFileName_DUFN_M = b'M'
#///^\d{8}_期权执行表
DCEUploadFileName_DUFN_S = b'S'
#///^\d{4}_\d{8}_\d{8}_DailyFundChg
SHFEUploadFileName_SUFN_O = b'O'
#///^\d{4}_\d{8}_\d{8}_Trade
SHFEUploadFileName_SUFN_T = b'T'
#///^\d{4}_\d{8}_\d{8}_SettlementDetail
SHFEUploadFileName_SUFN_P = b'P'
#///^\d{4}_\d{8}_\d{8}_Capital
SHFEUploadFileName_SUFN_F = b'F'
#///^\d{4}_SG\d{1}_\d{8}_\d{1}_Trade
CFFEXUploadFileName_SUFN_T = b'T'
#///^\d{4}_SG\d{1}_\d{8}_\d{1}_SettlementDetail
CFFEXUploadFileName_SUFN_P = b'P'
#///^\d{4}_SG\d{1}_\d{8}_\d{1}_Capital
CFFEXUploadFileName_SUFN_F = b'F'
#///^\d{4}_SG\d{1}_\d{8}_\d{1}_OptionExec
CFFEXUploadFileName_SUFN_S = b'S'
#///申请组合
CombDirection_Comb = b'0'
#///申请拆分
CombDirection_UnComb = b'1'
#///实值额
StrikeOffsetType_RealValue = b'1'
#///盈利额
StrikeOffsetType_ProfitValue = b'2'
#///实值比例
StrikeOffsetType_RealRatio = b'3'
#///盈利比例
StrikeOffsetType_ProfitRatio = b'4'
#///等待处理中
ReserveOpenAccStas_Processing = b'0'
#///已撤销
ReserveOpenAccStas_Cancelled = b'1'
#///已开户
ReserveOpenAccStas_Opened = b'2'
#///无效请求
ReserveOpenAccStas_Invalid = b'3'
#///弱密码库
WeakPasswordSource_Lib = b'1'
#///手工录入
WeakPasswordSource_Manual = b'2'
#///自对冲期权仓位
OptSelfCloseFlag_CloseSelfOptionPosition = b'1'
#///保留期权仓位
OptSelfCloseFlag_ReserveOptionPosition = b'2'
#///自对冲卖方履约后的期货仓位
OptSelfCloseFlag_SellCloseSelfFuturePosition = b'3'
#///保留卖方履约后的期货仓位
OptSelfCloseFlag_ReserveFuturePosition = b'4'
#///期货
BizType_Future = b'1'
#///证券
BizType_Stock = b'2'
#///直连的投资者
AppType_TYPE_Investor = b'1'
#///为每个投资者都创建连接的中继
AppType_TYPE_InvestorRelay = b'2'
#///所有投资者共享一个操作员连接的中继
AppType_TYPE_OperatorRelay = b'3'
#///未知
AppType_TYPE_UnKnown = b'4'
#///检查成功
ResponseValue_Right = b'0'
#///检查失败
ResponseValue_Refuse = b'1'
#///大宗交易
OTCTradeType_TRDT_Block = b'0'
#///期转现
OTCTradeType_TRDT_EFP = b'1'
#///基点价值
MatchType_MT_DV01 = b'1'
#///面值
MatchType_MT_ParValue = b'2'
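# Minimal usage sketch (an illustration, not part of the upstream CTP constant
# set): every value in this module is a single byte because the CTP C++ structs
# expose these enums as one-character fields, so handlers compare the raw bytes
# directly. The helper below is hypothetical.
def _demo_order_status_label(order_status):
    """Map a CTP OrderStatus byte to a readable label (hypothetical helper)."""
    labels = {
        OrderStatus_AllTraded: 'all traded',
        OrderStatus_PartTradedQueueing: 'partially traded, still queueing',
        OrderStatus_NoTradeQueueing: 'not traded, still queueing',
        OrderStatus_Canceled: 'canceled',
    }
    return labels.get(order_status, 'other')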
|
AlgoPlus
|
/CTP/ApiConst.py
|
ApiConst.py
|
# WeChat official account: AlgoPlus
# Website: http://algo.plus
# Project repository: https://gitee.com/AlgoPlus/
from time import sleep, perf_counter as timer
from AlgoPlus.CTP.TraderApiBase import TraderApiBase
from AlgoPlus.CTP.FutureAccount import FutureAccount
from AlgoPlus.CTP.ApiStruct import *
from AlgoPlus.CTP.ApiConst import *
from AlgoPlus.utils.base_field import to_bytes, to_str
class RiskManager(TraderApiBase):
    # ############################################################################# #
    def __init__(self, broker_id, td_server, investor_id, password, app_id, auth_code, md_queue=None, flow_path='', private_resume_type=2, public_resume_type=2):
        pass
    # ############################################################################# #
    def init_extra(self):
        """
        Initialize strategy parameters.
        :return:
        """
        # shape: {'InstrumentID': latest market-data dict}
        self.md_dict = {}
        # shape: {'InstrumentID': [b'00:00:00', b'00:00:00'], }
        self.server_time_dict = {}
        # shape: {'InstrumentID': {'LongVolume': 0, 'LongVolumeToday': 0, 'LongVolumeYesterday': 0, 'LongPositionList': [],
        #                          'ShortVolume': 0, 'ShortVolumeToday': 0, 'ShortVolumeYesterday': 0, 'ShortPositionList': []}}
        self.local_position_dict = {}
        # shape: {'InstrumentID': 0}
        self.action_num_dict = {}  # number of successful cancels per instrument
        # shape: {'InstrumentID': {'0': [], '1': []}}
        self.pl_parameter_dict = {}  # take-profit / stop-loss parameters
        parameter_dict = self.md_queue.get(block=False)  # strategy parameter dict, seeded as the first queue item
        self.id = parameter_dict['StrategyID']
        self.order_ref = self.id * 10000
        self.order_ref_range = [self.order_ref, self.order_ref + 10000]
        self.pl_parameter_dict = parameter_dict['ProfitLossParameter']
    # ############################################################################# #
    def get_price(self, instrument_id, direction, price_type=0):
        """
        :param instrument_id: instrument ID
        :param direction: position direction
        :param price_type: 0 -> opponent's price, 1 -> queueing price, 2 -> market price
        :return: order price, or None when market data is missing or price_type is unknown
        """
        md = self.md_dict.get(instrument_id)
        if md is None:  # no tick yet for this instrument (mirrors the guarded TraderApi variant below)
            return None
        if price_type == 0:  # opponent's price: hit the bid when selling, lift the ask when buying
            return md['BidPrice1'] if direction == Direction_Sell else md['AskPrice1']
        elif price_type == 1:  # queueing price: join one's own side of the book
            return md['AskPrice1'] if direction == Direction_Sell else md['BidPrice1']
        elif price_type == 2:  # market price: cross at the limit-down/limit-up bound
            return md['LowerLimitPrice'] if direction == Direction_Sell else md['UpperLimitPrice']
        return None
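    # A minimal illustration (hypothetical instrument ID; assumes a tick has
    # already arrived for it, so self.md_dict holds its latest market data):
    #     self.get_price(b'rb2101', Direction_Sell, price_type=0)  # -> that tick's BidPrice1
    #     self.get_price(b'rb2101', Direction_Buy, price_type=2)   # -> that tick's UpperLimitPrice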
    def OnRspOrderInsert(self, pInputOrder, pRspInfo, nRequestID, bIsLast):
        """
        Order-insert request response. Avoid time-consuming operations inside
        callbacks; see OnRtnOrder for the deferred-processing pattern.
        :param pInputOrder: instance of InputOrderField from AlgoPlus.CTP.ApiStruct.
        :param pRspInfo: instance of RspInfoField from AlgoPlus.CTP.ApiStruct, carrying ErrorID and ErrorMsg.
        :param nRequestID:
        :param bIsLast:
        :return:
        """
        if self.is_my_order(pInputOrder['OrderRef']):
            if pRspInfo['ErrorID'] != 0:
                self.on_order_insert_fail(pInputOrder)
            self.write_log(pRspInfo, pInputOrder)
    def OnErrRtnOrderInsert(self, pInputOrder, pRspInfo):
        """
        Order-insert error notification. Avoid time-consuming operations inside
        callbacks; see OnRtnOrder for the deferred-processing pattern.
        :param pInputOrder: instance of InputOrderField from AlgoPlus.CTP.ApiStruct.
        :param pRspInfo: instance of RspInfoField from AlgoPlus.CTP.ApiStruct, carrying ErrorID and ErrorMsg.
        :return:
        """
        if self.is_my_order(pInputOrder['OrderRef']):
            if pRspInfo['ErrorID'] != 0:
                self.on_order_insert_fail(pInputOrder)
            self.write_log(pRspInfo, pInputOrder)
    def on_order_insert_fail(self, pOrder):
        """
        Handling logic for a failed order insert. Avoid time-consuming operations
        inside callbacks; see OnRtnOrder for the deferred-processing pattern.
        :param pOrder: instance of InputOrderField from AlgoPlus.CTP.ApiStruct.
        :return:
        """
        pass
    # ############################################################################# #
    def OnRspOrderAction(self, pInputOrderAction, pRspInfo, nRequestID, bIsLast):
        """
        Order-cancel request response. Avoid time-consuming operations inside
        callbacks; see OnRtnOrder for the deferred-processing pattern.
        :param pInputOrderAction: instance of InputOrderActionField from AlgoPlus.CTP.ApiStruct.
        :param pRspInfo: instance of RspInfoField from AlgoPlus.CTP.ApiStruct, carrying ErrorID and ErrorMsg.
        :param nRequestID:
        :param bIsLast:
        :return:
        """
        if self.is_my_order(pInputOrderAction['OrderRef']):
            if pRspInfo['ErrorID'] != 0:
                self.on_order_action_fail(pInputOrderAction)
            self.write_log(pRspInfo, pInputOrderAction)
    def on_order_action_fail(self, pInputOrderAction):
        """
        Handling logic for a failed cancel. Avoid time-consuming operations inside
        callbacks; see OnRtnOrder for the deferred-processing pattern.
        :param pInputOrderAction: instance of InputOrderActionField from AlgoPlus.CTP.ApiStruct.
        :return:
        """
        pass
    # ############################################################################# #
    def is_my_order(self, order_ref):
        """
        Identify this strategy's orders by order_ref. Placeholder implementation:
        it accepts every order; override it to test order_ref against
        self.order_ref_range.
        """
        return True
    def OnRtnOrder(self, pOrder):
        """
        Notified here whenever an order's status changes. Avoid time-consuming
        operations inside callbacks. Dispatch to the appropriate trading logic
        according to pOrder['OrderStatus'].
        :param pOrder: instance of OrderField from AlgoPlus.CTP.ApiStruct.
        OrderField.OrderStatus values:
            0 - all traded
            1 - partially traded, still queueing
            2 - partially traded, no longer queueing
            3 - not traded, still queueing
            4 - not traded, no longer queueing
            5 - canceled
            a - unknown
            b - not touched yet
            c - touched
        OrderField.OrderSubmitStatus values:
            0 - insert submitted
            1 - cancel submitted
            2 - modify submitted
            3 - accepted
            4 - insert rejected
            5 - cancel rejected
            6 - modify rejected
        :return:
        """
        # not traded yet
        if pOrder['OrderStatus'] == OrderStatus_NoTradeQueueing:
            pass
        # fully or partially traded
        elif pOrder['OrderStatus'] == OrderStatus_AllTraded or pOrder['OrderStatus'] == OrderStatus_PartTradedQueueing:
            self.on_order_traded(pOrder)
        # cancel succeeded
        elif pOrder['OrderStatus'] == OrderStatus_Canceled:
            if pOrder['InstrumentID'] in self.action_num_dict.keys():
                self.action_num_dict[pOrder['InstrumentID']] += 1
            else:
                self.action_num_dict[pOrder['InstrumentID']] = 1
            self.on_order_action(pOrder)
        # order insert rejected
        elif pOrder['OrderSubmitStatus'] == OrderSubmitStatus_InsertRejected:
            self.on_order_insert_fail(pOrder)
        # cancel rejected
        elif pOrder['OrderSubmitStatus'] == OrderSubmitStatus_CancelRejected:
            self.on_order_action_fail(pOrder)
        self.write_log(to_str(pOrder['StatusMsg']), pOrder)
    def on_order_traded(self, pOrder):
        pass
    def on_order_action(self, pOrder):
        pass
    # ############################################################################# #
    def OnRtnTrade(self, pTrade):
        """
        Notified here when an order is (partially) filled. Avoid time-consuming
        operations inside callbacks; see OnRtnOrder for the deferred-processing pattern.
        TradeField carries the trade price, while OrderField does not;
        if the trade price is not needed, this notification can be ignored in favour of OrderField.
        :param pTrade: instance of TradeField from AlgoPlus.CTP.ApiStruct.
        :return:
        """
        pTrade['IsLock'] = False
        pTrade['AnchorTime'] = timer()
        pTrade['StopProfitDict'] = {}
        pTrade['StopLossDict'] = {}
        if pTrade['InstrumentID'] not in self.local_position_dict.keys():
            self.local_position_dict[pTrade['InstrumentID']] = {'LongVolume': 0, 'LongVolumeToday': 0, 'LongVolumeYesterday': 0, 'LongPositionList': [],
                                                                'ShortVolume': 0, 'ShortVolumeToday': 0, 'ShortVolumeYesterday': 0, 'ShortPositionList': []}
        local_position = self.local_position_dict[pTrade['InstrumentID']]
        if pTrade['OffsetFlag'] == OffsetFlag_Open:
            # opening trade: attach stop levels and book the new position
            self.update_stop_price(pTrade)
            if pTrade['Direction'] == Direction_Buy:
                local_position['LongVolume'] += pTrade['Volume']
                local_position['LongPositionList'].append(pTrade)
            elif pTrade['Direction'] == Direction_Sell:
                local_position['ShortVolume'] += pTrade['Volume']
                local_position['ShortPositionList'].append(pTrade)
        # closing trade: a buy reduces the short book, a sell reduces the long book
        elif pTrade['Direction'] == Direction_Buy:
            local_position['ShortVolume'] = max(local_position['ShortVolume'] - pTrade['Volume'], 0)
        elif pTrade['Direction'] == Direction_Sell:
            local_position['LongVolume'] = max(local_position['LongVolume'] - pTrade['Volume'], 0)
    def update_stop_price(self, position):
        """
        Attach fixed take-profit / stop-loss prices to a freshly opened position.
        :param position: position info (an enriched TradeField dict)
        :return:
        """
        if position['InstrumentID'] in self.pl_parameter_dict.keys():
            pl_dict = self.pl_parameter_dict[position['InstrumentID']]
            sgn = 1 if position['Direction'] == Direction_Buy else -1
            for pl_type, delta in pl_dict.items():
                # fixed take-profit
                if pl_type == '0':
                    position['StopProfitDict']['0'] = position['Price'] + delta[0] * sgn
                # fixed stop-loss
                elif pl_type == '1':
                    position['StopLossDict']['1'] = position['Price'] - delta[0] * sgn
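    # Shape illustration (hypothetical instrument and offsets): with
    #     self.pl_parameter_dict = {b'rb2101': {'0': [10.0], '1': [5.0]}}
    # a long fill at Price 3500.0 gets StopProfitDict['0'] = 3510.0 and
    # StopLossDict['1'] = 3495.0; for a short fill the signs flip.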
    # ############################################################################# #
    def check_position(self):
        """
        Check every local position against its stop thresholds and close the
        ones that have been triggered.
        """
        for instrument_id, position in self.local_position_dict.items():
            if instrument_id not in self.md_dict.keys():
                continue  # no market data yet for this instrument; skip it rather than abort the whole scan
            md = self.md_dict[instrument_id]
            for long_position in position['LongPositionList']:
                if not long_position['IsLock']:
                    trigger = False
                    for stop_profit in long_position['StopProfitDict'].values():
                        if md['LastPrice'] > stop_profit:
                            trigger = True
                            break
                    if not trigger:
                        for stop_loss in long_position['StopLossDict'].values():
                            if md['LastPrice'] < stop_loss:
                                trigger = True
                                break
                    if trigger:
                        order_price = self.get_price(instrument_id, Direction_Sell)
                        if order_price is not None:
                            self.sell_close(long_position['ExchangeID'], instrument_id, order_price, long_position['Volume'])
                            long_position['IsLock'] = True  # lock so the same position is not closed twice
            for short_position in position['ShortPositionList']:
                if not short_position['IsLock']:
                    trigger = False
                    for stop_profit in short_position['StopProfitDict'].values():
                        if md['LastPrice'] < stop_profit:
                            trigger = True
                            break
                    if not trigger:
                        for stop_loss in short_position['StopLossDict'].values():
                            if md['LastPrice'] > stop_loss:
                                trigger = True
                                break
                    if trigger:
                        order_price = self.get_price(instrument_id, Direction_Buy)
                        if order_price is not None:
                            self.buy_close(short_position['ExchangeID'], instrument_id, order_price, short_position['Volume'])
                            short_position['IsLock'] = True  # lock so the same position is not closed twice
    # ############################################################################# #
    def Join(self):
        while True:
            if self.status == 0:  # status == 0: api ready (assumed TraderApiBase convention)
                if self.md_queue is not None:
                    # drain the market-data queue, keeping only the latest tick per instrument
                    while not self.md_queue.empty():
                        last_md = self.md_queue.get(block=False)
                        self.md_dict[last_md['InstrumentID']] = last_md
                    self.check_position()
            else:
                sleep(1)
def run_risk_manager(account, md_queue=None):
    if isinstance(account, FutureAccount):
        trader_engine = RiskManager(
            account.broker_id,
            account.server_dict['TDServer'],
            account.investor_id,
            account.password,
            account.app_id,
            account.auth_code,
            md_queue,
            account.td_flow_path
        )
        trader_engine.Join()
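# Usage sketch (assumptions: this runs in its own process, md_queue is a
# multiprocessing.Queue fed by a market-data engine, and, as init_extra
# requires, the first item on the queue is the strategy parameter dict):
#     md_queue.put({'StrategyID': 1,
#                   'ProfitLossParameter': {b'rb2101': {'0': [10.0], '1': [5.0]}}})  # hypothetical values
#     run_risk_manager(account, md_queue)  # account: a configured FutureAccount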
|
AlgoPlus
|
/CTP/RiskManager.py
|
RiskManager.py
|
# AlgoPlus open-source framework for quantitative investing
# WeChat official account: AlgoPlus
# Website: http://algo.plus
from time import sleep, perf_counter as timer
from AlgoPlus.CTP.TraderApiBase import TraderApiBase
from AlgoPlus.CTP.FutureAccount import FutureAccount
from AlgoPlus.utils.base_field import to_bytes, to_str
from AlgoPlus.CTP.ApiStruct import *
from AlgoPlus.CTP.ApiConst import *
class TraderApi(TraderApiBase):
    # ############################################################################# #
    def __init__(self, broker_id, td_server, investor_id, password, app_id, auth_code, md_queue=None,
                 page_dir='', private_resume_type=2, public_resume_type=2):
        pass
    # ############################################################################# #
    def init_extra(self):
        """
        Initialize strategy parameters.
        :return:
        """
        self.rtn_order_list = []
        self.last_rtn_order_index = 0  # index of the last processed order return
        self.rtn_trade_list = []
        self.last_rtn_trade_index = 0  # index of the last processed trade return
        # shape: {'InstrumentID': latest market-data dict}
        self.md_dict = {}
        # shape: {'InstrumentID': [b'00:00:00', b'00:00:00'], }
        self.server_time_dict = {}
        # shape: {'InstrumentID': {'LongVolume': 0, 'LongVolumeToday': 0, 'LongVolumeYesterday': 0, 'LongPositionList': [],
        #                          'ShortVolume': 0, 'ShortVolumeToday': 0, 'ShortVolumeYesterday': 0, 'ShortPositionList': []}}
        self.local_position_dict = {}
        # shape: {'InstrumentID': 0}
        self.action_num_dict = {}  # number of successful cancels per instrument
        # shape: {'InstrumentID': {'0': [], '1': []}}
        self.pl_parameter_dict = {}  # take-profit / stop-loss parameters
        parameter_dict = self.md_queue.get(block=False)  # strategy parameter dict, seeded as the first queue item
        self.id = parameter_dict['StrategyID']
        self.order_ref = self.id * 10000
        self.order_ref_range = [self.order_ref, self.order_ref + 10000]
        self.pl_parameter_dict = parameter_dict['ProfitLossParameter']
    # ############################################################################# #
    def get_price(self, instrument_id, direction, price_type=0):
        """
        :param instrument_id: instrument ID
        :param direction: position direction
        :param price_type: 0 -> opponent's price, 1 -> queueing price, 2 -> market price
        :return: order price, or None on any failure
        """
        result = None
        try:
            md = self.md_dict[instrument_id]
            if price_type == 0:  # opponent's price
                result = md['BidPrice1'] if direction == Direction_Sell else md['AskPrice1']
            elif price_type == 1:  # queueing price
                result = md['AskPrice1'] if direction == Direction_Sell else md['BidPrice1']
            elif price_type == 2:  # market price (limit-down when selling, limit-up when buying)
                result = md['LowerLimitPrice'] if direction == Direction_Sell else md['UpperLimitPrice']
        except Exception as err_msg:
            self.write_log('get_price', err_msg)
        finally:
            return result
    def OnRspOrderInsert(self, pInputOrder, pRspInfo, nRequestID, bIsLast):
        """
        Order-insert response. Avoid time-consuming work inside callbacks; see
        OnRtnOrder for the deferred-processing pattern.
        :param pInputOrder: instance of InputOrderField from AlgoPlus.CTP.ApiStruct.
        :param pRspInfo: instance of RspInfoField from AlgoPlus.CTP.ApiStruct,
            carrying the error code ErrorID and error message ErrorMsg.
        :param nRequestID:
        :param bIsLast:
        :return:
        """
        if self.is_my_order(pInputOrder['OrderRef']):
            if pRspInfo['ErrorID'] != 0:
                self.on_order_insert_fail(pInputOrder)
            self.write_log(pRspInfo, pInputOrder)
    def OnErrRtnOrderInsert(self, pInputOrder, pRspInfo):
        """
        Order-insert error notification. Avoid time-consuming work inside
        callbacks; see OnRtnOrder for the deferred-processing pattern.
        :param pInputOrder: instance of InputOrderField from AlgoPlus.CTP.ApiStruct.
        :param pRspInfo: instance of RspInfoField from AlgoPlus.CTP.ApiStruct,
            carrying the error code ErrorID and error message ErrorMsg.
        :return:
        """
        if self.is_my_order(pInputOrder['OrderRef']):
            if pRspInfo['ErrorID'] != 0:
                self.on_order_insert_fail(pInputOrder)
            self.write_log(pRspInfo, pInputOrder)
    def on_order_insert_fail(self, pOrder):
        """
        Handler for failed order inserts. Avoid time-consuming work inside
        callbacks; see OnRtnOrder for the deferred-processing pattern.
        :param pOrder: instance of InputOrderField (or OrderField when called
            from process_rtn_order) from AlgoPlus.CTP.ApiStruct.
        :return:
        """
        pass
# ############################################################################# #
    def OnRspOrderAction(self, pInputOrderAction, pRspInfo, nRequestID, bIsLast):
        """
        Order-action (cancel) response. Avoid time-consuming work inside
        callbacks; see OnRtnOrder for the deferred-processing pattern.
        :param pInputOrderAction: instance of InputOrderActionField from AlgoPlus.CTP.ApiStruct.
        :param pRspInfo: instance of RspInfoField from AlgoPlus.CTP.ApiStruct,
            carrying the error code ErrorID and error message ErrorMsg.
        :param nRequestID:
        :param bIsLast:
        :return:
        """
        if self.is_my_order(pInputOrderAction['OrderRef']):
            if pRspInfo['ErrorID'] != 0:
                self.on_order_action_fail(pInputOrderAction)
            self.write_log(pRspInfo, pInputOrderAction)
    def on_order_action_fail(self, pInputOrderAction):
        """
        Handler for failed cancels. Avoid time-consuming work inside callbacks;
        see OnRtnOrder for the deferred-processing pattern.
        :param pInputOrderAction: instance of InputOrderActionField from AlgoPlus.CTP.ApiStruct.
        :return:
        """
        pass
# ############################################################################# #
    def is_my_order(self, order_ref):
        """
        Identify this strategy's orders by order_ref. The default accepts
        everything; override it to filter on self.order_ref_range.
        """
        return True
    def OnRtnOrder(self, pOrder):
        """
        Notified whenever an order's status changes. Avoid time-consuming work
        inside callbacks: the order is only queued here, and process_rtn_order
        later dispatches the appropriate trading logic on pOrder['OrderStatus'].
        :param pOrder: instance of OrderField from AlgoPlus.CTP.ApiStruct.
        OrderField.OrderStatus values:
            '0' - fully traded
            '1' - partially traded, still queueing
            '2' - partially traded, no longer queueing
            '3' - not traded, still queueing
            '4' - not traded, no longer queueing
            '5' - canceled
            'a' - unknown
            'b' - not yet triggered
            'c' - triggered
        OrderField.OrderSubmitStatus values:
            '0' - insert submitted
            '1' - cancel submitted
            '2' - modify submitted
            '3' - accepted
            '4' - insert rejected
            '5' - cancel rejected
            '6' - modify rejected
        :return:
        """
        # end of latency timing
        if self.is_my_order(pOrder['OrderRef']):
            self.rtn_order_list.append(pOrder)
    def process_rtn_order(self):
        try:
            last_rtn_order_index = len(self.rtn_order_list)
            for rtn_order in self.rtn_order_list[self.last_rtn_order_index:last_rtn_order_index]:
                # not traded, still queueing
                if rtn_order['OrderStatus'] == OrderStatus_NoTradeQueueing:
                    pass
                # fully traded, or partially traded and still queueing
                elif rtn_order['OrderStatus'] == OrderStatus_AllTraded or rtn_order['OrderStatus'] == OrderStatus_PartTradedQueueing:
                    self.on_order_traded(rtn_order)
                # cancel succeeded
                elif rtn_order['OrderStatus'] == OrderStatus_Canceled:
                    if rtn_order['InstrumentID'] in self.action_num_dict.keys():
                        self.action_num_dict[rtn_order['InstrumentID']] += 1
                    else:
                        self.action_num_dict[rtn_order['InstrumentID']] = 1
                    self.on_order_action(rtn_order)
                # insert rejected
                elif rtn_order['OrderSubmitStatus'] == OrderSubmitStatus_InsertRejected:
                    self.on_order_insert_fail(rtn_order)
                # cancel rejected
                elif rtn_order['OrderSubmitStatus'] == OrderSubmitStatus_CancelRejected:
                    self.on_order_action_fail(rtn_order)
                self.write_log(to_str(rtn_order['StatusMsg']), rtn_order)
            self.last_rtn_order_index = last_rtn_order_index
        except Exception as err_msg:
            self.write_log('process_rtn_order', err_msg)
def on_order_traded(self, pOrder):
pass
def on_order_action(self, pOrder):
pass
# ############################################################################# #
    def OnRtnTrade(self, pTrade):
        """
        Notified when an order is filled. Avoid time-consuming work inside
        callbacks; see OnRtnOrder for the deferred-processing pattern.
        TradeField carries the fill price, which OrderField does not; if the
        fill price is not needed, this notification can be ignored in favour of
        OrderField.
        :param pTrade: instance of TradeField from AlgoPlus.CTP.ApiStruct.
        :return:
        """
        if self.is_my_order(pTrade['OrderRef']):
            self.rtn_trade_list.append(pTrade)
def process_rtn_trade(self):
"""
从上次订单ID位置开始处理订单数据。
:return:
"""
try:
last_rtn_trade_index = len(self.rtn_trade_list)
for rtn_trade in self.rtn_trade_list[self.last_rtn_trade_index:last_rtn_trade_index]:
rtn_trade['IsLock'] = False
rtn_trade['AnchorTime'] = timer()
rtn_trade['StopProfitDict'] = {}
rtn_trade['StopLossDict'] = {}
if rtn_trade['InstrumentID'] not in self.local_position_dict.keys():
self.local_position_dict[rtn_trade['InstrumentID']] = {'LongVolume': 0, 'LongVolumeToday': 0, 'LongVolumeYesterday': 0, 'LongPositionList': [],
'ShortVolume': 0, 'ShortVolumeToday': 0, 'ShortVolumeYesterday': 0, 'ShortPositionList': []}
local_position = self.local_position_dict[rtn_trade['InstrumentID']]
if rtn_trade['OffsetFlag'] == OffsetFlag_Open:
self.update_stop_price(rtn_trade)
if rtn_trade['Direction'] == Direction_Buy:
local_position['LongVolume'] += rtn_trade['Volume']
local_position['LongPositionList'].append(rtn_trade)
elif rtn_trade['Direction'] == Direction_Sell:
local_position['ShortVolume'] += rtn_trade['Volume']
local_position['ShortPositionList'].append(rtn_trade)
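                # a non-open trade closes an existing position: a buy closes shorts, a sell closes longs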
elif rtn_trade['Direction'] == Direction_Buy:
local_position['ShortVolume'] = max(local_position['ShortVolume'] - rtn_trade['Volume'], 0)
elif rtn_trade['Direction'] == Direction_Sell:
local_position['LongVolume'] = max(local_position['LongVolume'] - rtn_trade['Volume'], 0)
self.last_rtn_trade_index = last_rtn_trade_index
except Exception as err_msg:
self.write_log('process_rtn_trade', err_msg)
    def update_stop_price(self, position):
        """
        Compute the take-profit / stop-loss thresholds for a new position
        detail. The stop types follow https://7jia.com/1002.html
        :param position: the position detail (an augmented trade return)
        :return:
        """
        try:
            if position['InstrumentID'] in self.pl_parameter_dict.keys():
                pl_dict = self.pl_parameter_dict[position['InstrumentID']]
                sgn = 1 if position['Direction'] == Direction_Buy else -1
                for pl_type, delta in pl_dict.items():
                    # fixed take-profit
                    if pl_type == '0':
                        position['StopProfitDict']['0'] = position['Price'] + delta[0] * sgn
                    # fixed stop-loss
                    elif pl_type == '1':
                        position['StopLossDict']['1'] = position['Price'] - delta[0] * sgn
        except Exception as err_msg:
            self.write_log('update_stop_price', err_msg)
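    # Worked example (illustrative, not part of the original file): for a long
    # detail opened at Price=3500 with pl_parameter_dict = {'rb2010': {'0': [10], '1': [8]}},
    # sgn = +1, so StopProfitDict['0'] = 3510 and StopLossDict['1'] = 3492; for a
    # short detail opened at 3500, sgn = -1, giving 3490 and 3508 respectively.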
# ############################################################################# #
    def check_position(self):
        """
        Check every local position detail against its stop thresholds.
        """
        try:
            for instrument_id, position in self.local_position_dict.items():
                if instrument_id not in self.md_dict.keys():
                    continue  # no market data for this instrument yet: skip it rather than abort the whole scan
md = self.md_dict[instrument_id]
for long_position in position['LongPositionList']:
if not long_position['IsLock']:
trigger = False
for stop_profit in long_position['StopProfitDict'].values():
if md['LastPrice'] > stop_profit:
trigger = True
break
if not trigger:
for stop_loss in long_position['StopLossDict'].values():
if md['LastPrice'] < stop_loss:
trigger = True
break
if trigger:
order_price = self.get_price(instrument_id, Direction_Sell)
if order_price is not None:
self.sell_close(long_position['ExchangeID'], instrument_id, order_price, long_position['Volume'])
long_position['IsLock'] = True
for short_position in position['ShortPositionList']:
if not short_position['IsLock']:
trigger = False
for stop_profit in short_position['StopProfitDict'].values():
if md['LastPrice'] < stop_profit:
trigger = True
break
if not trigger:
for stop_loss in short_position['StopLossDict'].values():
if md['LastPrice'] > stop_loss:
trigger = True
break
if trigger:
order_price = self.get_price(instrument_id, Direction_Buy)
if order_price is not None:
self.buy_close(short_position['ExchangeID'], instrument_id, order_price, short_position['Volume'])
short_position['IsLock'] = True
except Exception as err:
self.write_log(err)
# ############################################################################# #
def Join(self):
while True:
if self.status == 0:
if self.md_queue is not None:
while not self.md_queue.empty():
last_md = self.md_queue.get(block=False)
self.md_dict[last_md['InstrumentID']] = last_md
self.process_rtn_order()
self.process_rtn_trade()
self.check_position()
else:
sleep(1)
def run_traderapi(account, md_queue=None):
if isinstance(account, FutureAccount):
trader_engine = TraderApi(
account.broker_id,
account.server_dict['TDServer'],
account.investor_id,
account.password,
account.app_id,
account.auth_code,
md_queue,
account.td_page_dir
)
trader_engine.Join()
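# ############################################################################# #
# Sketch (not part of the original file) of the strategy-parameter message that
# init_extra expects as the first item on md_queue. Key '0' carries the fixed
# take-profit offset and key '1' the fixed stop-loss offset, in price units;
# the instrument code and values are illustrative placeholders.
#
#     example_parameter_dict = {
#         'StrategyID': 1,
#         'ProfitLossParameter': {'rb2010': {'0': [10], '1': [8]}},
#     }
#     md_queue.put(example_parameter_dict)  # must be queued before TraderApi starts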
class ReqInstrumentApi(TraderApiBase):
def __init__(self, broker_id, td_server, investor_id, password, app_id, auth_code, md_queue=None,
page_dir='', private_resume_type=2, public_resume_type=2):
self.instrument_id_list = []
    def req_qry_instrument_id(self):
qry_instrument_field = QryInstrumentField()
self.ReqQryInstrument(qry_instrument_field)
def OnRspQryInstrument(self, pInstrument, pRspInfo, nRequestID, bIsLast):
if not pRspInfo or pRspInfo['ErrorID'] == 0:
if pInstrument \
and (pInstrument['InstrumentID'][-4:].isdigit() and pInstrument['InstrumentID'][:-4].isalpha()
or pInstrument['InstrumentID'][-3:].isdigit() and pInstrument['InstrumentID'][:-3].isalpha()):
self.instrument_id_list.append(pInstrument['InstrumentID'])
self.status += bIsLast
def Join(self):
while True:
if self.status == 0:
                self.req_qry_instrument_id()
self.status = 1
elif self.status == 2:
return self.instrument_id_list
sleep(1)
def req_instrument(account):
if isinstance(account, FutureAccount):
trader_engine = ReqInstrumentApi(account.broker_id,
account.server_dict['TDServer'],
account.investor_id,
account.password,
account.app_id,
account.auth_code,
None,
account.td_page_dir)
return trader_engine.Join()
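# Usage sketch (an assumption, not part of the original file):
#     instrument_id_list = req_instrument(account)  # blocks until the query completes
# OnRspQryInstrument above keeps only plain futures codes -- letters followed by
# three or four digits, e.g. b'rb2010' -- and drops combination contracts.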
# ############################################################################# #
# End of file: AlgoPlus/CTP/TraderApi.py
# ############################################################################# #
# AlgoPlus open-source framework for quantitative investing
# WeChat official account: AlgoPlus
# Website: http://algo.plus
from ctypes import *
from AlgoPlus.utils.base_field import BaseField
class LocalInputOrderField(BaseField):
_fields_ = [
('ExchangeID', c_char * 9), # 交易所代码
('InstrumentID', c_char * 31), # 合约代码
('OrderRef', c_char * 13), # 报单引用
('Direction', c_char * 1), # 买卖方向
('OffsetFlag', c_char * 5), # 组合开平标志
('LimitPrice', c_double), # 报单价格
('VolumeTotalOriginal', c_int), # 数量
('VolumeTotal', c_int), # 剩余数量
('OrderStatus', c_char * 1), # 报单状态
('InputTime', c_float), # 委托时间
]
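# ############################################################################# #
# Minimal sketch (not part of the original file): BaseField subclasses behave as
# ctypes Structures (an assumption based on the c_char/c_double field types), so
# char-array fields take bytes and numeric fields take Python numbers. All
# values below are illustrative placeholders.
def _example_local_input_order():
    order = LocalInputOrderField()
    order.InstrumentID = b'rb2010'  # instrument code
    order.OrderRef = b'10001'       # order reference
    order.Direction = b'0'          # CTP convention: b'0' buy, b'1' sell
    order.LimitPrice = 3500.0       # limit price
    order.VolumeTotalOriginal = 1   # original volume
    return order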
class DisseminationField(BaseField):
"""信息分发"""
_fields_ = [
('SequenceSeries', c_short), # ///序列系列号
('SequenceNo', c_int) # 序列号
]
class ReqUserLoginField(BaseField):
"""用户登录请求"""
_fields_ = [
('TradingDay', c_char * 9), # ///交易日
('BrokerID', c_char * 11), # 经纪公司代码
('UserID', c_char * 16), # 用户代码
('Password', c_char * 41), # 密码
('UserProductInfo', c_char * 11), # 用户端产品信息
('InterfaceProductInfo', c_char * 11), # 接口端产品信息
('ProtocolInfo', c_char * 11), # 协议信息
('MacAddress', c_char * 21), # Mac地址
('OneTimePassword', c_char * 41), # 动态密码
('ClientIPAddress', c_char * 16), # 终端IP地址
('LoginRemark', c_char * 36), # 登录备注
('ClientIPPort', c_int) # 终端IP端口
]
class RspUserLoginField(BaseField):
"""用户登录应答"""
_fields_ = [
('TradingDay', c_char * 9), # ///交易日
('LoginTime', c_char * 9), # 登录成功时间
('BrokerID', c_char * 11), # 经纪公司代码
('UserID', c_char * 16), # 用户代码
('SystemName', c_char * 41), # 交易系统名称
('FrontID', c_int), # 前置编号
('SessionID', c_int), # 会话编号
('MaxOrderRef', c_char * 13), # 最大报单引用
('SHFETime', c_char * 9), # 上期所时间
('DCETime', c_char * 9), # 大商所时间
('CZCETime', c_char * 9), # 郑商所时间
('FFEXTime', c_char * 9), # 中金所时间
('INETime', c_char * 9) # 能源中心时间
]
class UserLogoutField(BaseField):
"""用户登出请求"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('UserID', c_char * 16) # 用户代码
]
class ForceUserLogoutField(BaseField):
"""强制交易员退出"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('UserID', c_char * 16) # 用户代码
]
class ReqAuthenticateField(BaseField):
"""客户端认证请求"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('UserID', c_char * 16), # 用户代码
('UserProductInfo', c_char * 11), # 用户端产品信息
('AuthCode', c_char * 17), # 认证码
('AppID', c_char * 33) # App代码
]
class RspAuthenticateField(BaseField):
"""客户端认证响应"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('UserID', c_char * 16), # 用户代码
('UserProductInfo', c_char * 11), # 用户端产品信息
('AppID', c_char * 33), # App代码
('AppType', c_char * 1) # App类型
]
class AuthenticationInfoField(BaseField):
"""客户端认证信息"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('UserID', c_char * 16), # 用户代码
('UserProductInfo', c_char * 11), # 用户端产品信息
('AuthInfo', c_char * 129), # 认证信息
('IsResult', c_int), # 是否为认证结果
('AppID', c_char * 33), # App代码
('AppType', c_char * 1) # App类型
]
class RspUserLogin2Field(BaseField):
"""用户登录应答2"""
_fields_ = [
('TradingDay', c_char * 9), # ///交易日
('LoginTime', c_char * 9), # 登录成功时间
('BrokerID', c_char * 11), # 经纪公司代码
('UserID', c_char * 16), # 用户代码
('SystemName', c_char * 41), # 交易系统名称
('FrontID', c_int), # 前置编号
('SessionID', c_int), # 会话编号
('MaxOrderRef', c_char * 13), # 最大报单引用
('SHFETime', c_char * 9), # 上期所时间
('DCETime', c_char * 9), # 大商所时间
('CZCETime', c_char * 9), # 郑商所时间
('FFEXTime', c_char * 9), # 中金所时间
('INETime', c_char * 9), # 能源中心时间
('RandomString', c_char * 17) # 随机串
]
class TransferHeaderField(BaseField):
"""银期转帐报文头"""
_fields_ = [
('Version', c_char * 4), # ///版本号,常量,1.0
('TradeCode', c_char * 7), # 交易代码,必填
('TradeDate', c_char * 9), # 交易日期,必填,格式:yyyymmdd
('TradeTime', c_char * 9), # 交易时间,必填,格式:hhmmss
('TradeSerial', c_char * 9), # 发起方流水号,N/A
('FutureID', c_char * 11), # 期货公司代码,必填
('BankID', c_char * 4), # 银行代码,根据查询银行得到,必填
('BankBrchID', c_char * 5), # 银行分中心代码,根据查询银行得到,必填
('OperNo', c_char * 17), # 操作员,N/A
('DeviceID', c_char * 3), # 交易设备类型,N/A
('RecordNum', c_char * 7), # 记录数,N/A
('SessionID', c_int), # 会话编号,N/A
('RequestID', c_int) # 请求编号,N/A
]
class TransferBankToFutureReqField(BaseField):
"""银行资金转期货请求,TradeCode=202001"""
_fields_ = [
('FutureAccount', c_char * 13), # ///期货资金账户
('FuturePwdFlag', c_char * 1), # 密码标志
('FutureAccPwd', c_char * 17), # 密码
('TradeAmt', c_double), # 转账金额
('CustFee', c_double), # 客户手续费
('CurrencyCode', c_char * 4) # 币种:RMB-人民币 USD-美圆 HKD-港元
]
class TransferBankToFutureRspField(BaseField):
"""银行资金转期货请求响应"""
_fields_ = [
('RetCode', c_char * 5), # ///响应代码
('RetInfo', c_char * 129), # 响应信息
('FutureAccount', c_char * 13), # 资金账户
('TradeAmt', c_double), # 转帐金额
('CustFee', c_double), # 应收客户手续费
('CurrencyCode', c_char * 4) # 币种
]
class TransferFutureToBankReqField(BaseField):
"""期货资金转银行请求,TradeCode=202002"""
_fields_ = [
('FutureAccount', c_char * 13), # ///期货资金账户
('FuturePwdFlag', c_char * 1), # 密码标志
('FutureAccPwd', c_char * 17), # 密码
('TradeAmt', c_double), # 转账金额
('CustFee', c_double), # 客户手续费
('CurrencyCode', c_char * 4) # 币种:RMB-人民币 USD-美圆 HKD-港元
]
class TransferFutureToBankRspField(BaseField):
"""期货资金转银行请求响应"""
_fields_ = [
('RetCode', c_char * 5), # ///响应代码
('RetInfo', c_char * 129), # 响应信息
('FutureAccount', c_char * 13), # 资金账户
('TradeAmt', c_double), # 转帐金额
('CustFee', c_double), # 应收客户手续费
('CurrencyCode', c_char * 4) # 币种
]
class TransferQryBankReqField(BaseField):
"""查询银行资金请求,TradeCode=204002"""
_fields_ = [
('FutureAccount', c_char * 13), # ///期货资金账户
('FuturePwdFlag', c_char * 1), # 密码标志
('FutureAccPwd', c_char * 17), # 密码
('CurrencyCode', c_char * 4) # 币种:RMB-人民币 USD-美圆 HKD-港元
]
class TransferQryBankRspField(BaseField):
"""查询银行资金请求响应"""
_fields_ = [
('RetCode', c_char * 5), # ///响应代码
('RetInfo', c_char * 129), # 响应信息
('FutureAccount', c_char * 13), # 资金账户
('TradeAmt', c_double), # 银行余额
('UseAmt', c_double), # 银行可用余额
('FetchAmt', c_double), # 银行可取余额
('CurrencyCode', c_char * 4) # 币种
]
class TransferQryDetailReqField(BaseField):
"""查询银行交易明细请求,TradeCode=204999"""
_fields_ = [
('FutureAccount', c_char * 13) # ///期货资金账户
]
class TransferQryDetailRspField(BaseField):
"""查询银行交易明细请求响应"""
_fields_ = [
('TradeDate', c_char * 9), # ///交易日期
('TradeTime', c_char * 9), # 交易时间
('TradeCode', c_char * 7), # 交易代码
('FutureSerial', c_int), # 期货流水号
('FutureID', c_char * 11), # 期货公司代码
('FutureAccount', c_char * 22), # 资金帐号
('BankSerial', c_int), # 银行流水号
('BankID', c_char * 4), # 银行代码
('BankBrchID', c_char * 5), # 银行分中心代码
('BankAccount', c_char * 41), # 银行账号
('CertCode', c_char * 21), # 证件号码
('CurrencyCode', c_char * 4), # 货币代码
('TxAmount', c_double), # 发生金额
('Flag', c_char * 1) # 有效标志
]
class RspInfoField(BaseField):
"""响应信息"""
_fields_ = [
('ErrorID', c_int), # ///错误代码
('ErrorMsg', c_char * 81) # 错误信息
]
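# Sketch (not part of the original file) of the framework-wide convention for
# checking a response, as used in OnRspQryInstrument in TraderApi.py: a response
# is successful when pRspInfo is empty or its ErrorID equals 0.
def _rsp_is_ok(pRspInfo):
    return (not pRspInfo) or pRspInfo['ErrorID'] == 0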
class ExchangeField(BaseField):
"""交易所"""
_fields_ = [
('ExchangeID', c_char * 9), # ///交易所代码
('ExchangeName', c_char * 61), # 交易所名称
('ExchangeProperty', c_char * 1) # 交易所属性
]
class ProductField(BaseField):
"""产品"""
_fields_ = [
('ProductID', c_char * 31), # ///产品代码
('ProductName', c_char * 21), # 产品名称
('ExchangeID', c_char * 9), # 交易所代码
('ProductClass', c_char * 1), # 产品类型
('VolumeMultiple', c_int), # 合约数量乘数
('PriceTick', c_double), # 最小变动价位
('MaxMarketOrderVolume', c_int), # 市价单最大下单量
('MinMarketOrderVolume', c_int), # 市价单最小下单量
('MaxLimitOrderVolume', c_int), # 限价单最大下单量
('MinLimitOrderVolume', c_int), # 限价单最小下单量
('PositionType', c_char * 1), # 持仓类型
('PositionDateType', c_char * 1), # 持仓日期类型
('CloseDealType', c_char * 1), # 平仓处理类型
('TradeCurrencyID', c_char * 4), # 交易币种类型
('MortgageFundUseRange', c_char * 1), # 质押资金可用范围
('ExchangeProductID', c_char * 31), # 交易所产品代码
('UnderlyingMultiple', c_double) # 合约基础商品乘数
]
class InstrumentField(BaseField):
"""合约"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('ExchangeID', c_char * 9), # 交易所代码
('InstrumentName', c_char * 21), # 合约名称
('ExchangeInstID', c_char * 31), # 合约在交易所的代码
('ProductID', c_char * 31), # 产品代码
('ProductClass', c_char * 1), # 产品类型
('DeliveryYear', c_int), # 交割年份
('DeliveryMonth', c_int), # 交割月
('MaxMarketOrderVolume', c_int), # 市价单最大下单量
('MinMarketOrderVolume', c_int), # 市价单最小下单量
('MaxLimitOrderVolume', c_int), # 限价单最大下单量
('MinLimitOrderVolume', c_int), # 限价单最小下单量
('VolumeMultiple', c_int), # 合约数量乘数
('PriceTick', c_double), # 最小变动价位
('CreateDate', c_char * 9), # 创建日
('OpenDate', c_char * 9), # 上市日
('ExpireDate', c_char * 9), # 到期日
('StartDelivDate', c_char * 9), # 开始交割日
('EndDelivDate', c_char * 9), # 结束交割日
('InstLifePhase', c_char * 1), # 合约生命周期状态
('IsTrading', c_int), # 当前是否交易
('PositionType', c_char * 1), # 持仓类型
('PositionDateType', c_char * 1), # 持仓日期类型
('LongMarginRatio', c_double), # 多头保证金率
('ShortMarginRatio', c_double), # 空头保证金率
('MaxMarginSideAlgorithm', c_char * 1), # 是否使用大额单边保证金算法
('UnderlyingInstrID', c_char * 31), # 基础商品代码
('StrikePrice', c_double), # 执行价
('OptionsType', c_char * 1), # 期权类型
('UnderlyingMultiple', c_double), # 合约基础商品乘数
('CombinationType', c_char * 1) # 组合类型
]
class BrokerField(BaseField):
"""经纪公司"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('BrokerAbbr', c_char * 9), # 经纪公司简称
('BrokerName', c_char * 81), # 经纪公司名称
('IsActive', c_int) # 是否活跃
]
class TraderField(BaseField):
"""交易所交易员"""
_fields_ = [
('ExchangeID', c_char * 9), # ///交易所代码
('TraderID', c_char * 21), # 交易所交易员代码
('ParticipantID', c_char * 11), # 会员代码
('Password', c_char * 41), # 密码
('InstallCount', c_int), # 安装数量
('BrokerID', c_char * 11) # 经纪公司代码
]
class InvestorField(BaseField):
"""投资者"""
_fields_ = [
('InvestorID', c_char * 13), # ///投资者代码
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorGroupID', c_char * 13), # 投资者分组代码
('InvestorName', c_char * 81), # 投资者名称
('IdentifiedCardType', c_char * 1), # 证件类型
('IdentifiedCardNo', c_char * 51), # 证件号码
('IsActive', c_int), # 是否活跃
('Telephone', c_char * 41), # 联系电话
('Address', c_char * 101), # 通讯地址
('OpenDate', c_char * 9), # 开户日期
('Mobile', c_char * 41), # 手机
('CommModelID', c_char * 13), # 手续费率模板代码
('MarginModelID', c_char * 13) # 保证金率模板代码
]
class TradingCodeField(BaseField):
"""交易编码"""
_fields_ = [
('InvestorID', c_char * 13), # ///投资者代码
('BrokerID', c_char * 11), # 经纪公司代码
('ExchangeID', c_char * 9), # 交易所代码
('ClientID', c_char * 11), # 客户代码
('IsActive', c_int), # 是否活跃
('ClientIDType', c_char * 1), # 交易编码类型
('BranchID', c_char * 9), # 营业部编号
('BizType', c_char * 1), # 业务类型
('InvestUnitID', c_char * 17) # 投资单元代码
]
class PartBrokerField(BaseField):
"""会员编码和经纪公司编码对照表"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('ExchangeID', c_char * 9), # 交易所代码
('ParticipantID', c_char * 11), # 会员代码
('IsActive', c_int) # 是否活跃
]
class SuperUserField(BaseField):
"""管理用户"""
_fields_ = [
('UserID', c_char * 16), # ///用户代码
('UserName', c_char * 81), # 用户名称
('Password', c_char * 41), # 密码
('IsActive', c_int) # 是否活跃
]
class SuperUserFunctionField(BaseField):
"""管理用户功能权限"""
_fields_ = [
('UserID', c_char * 16), # ///用户代码
('FunctionCode', c_char * 1) # 功能代码
]
class InvestorGroupField(BaseField):
"""投资者组"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorGroupID', c_char * 13), # 投资者分组代码
('InvestorGroupName', c_char * 41) # 投资者分组名称
]
class TradingAccountField(BaseField):
"""资金账户"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('AccountID', c_char * 13), # 投资者帐号
('PreMortgage', c_double), # 上次质押金额
('PreCredit', c_double), # 上次信用额度
('PreDeposit', c_double), # 上次存款额
('PreBalance', c_double), # 上次结算准备金
('PreMargin', c_double), # 上次占用的保证金
('InterestBase', c_double), # 利息基数
('Interest', c_double), # 利息收入
('Deposit', c_double), # 入金金额
('Withdraw', c_double), # 出金金额
('FrozenMargin', c_double), # 冻结的保证金
('FrozenCash', c_double), # 冻结的资金
('FrozenCommission', c_double), # 冻结的手续费
('CurrMargin', c_double), # 当前保证金总额
('CashIn', c_double), # 资金差额
('Commission', c_double), # 手续费
('CloseProfit', c_double), # 平仓盈亏
('PositionProfit', c_double), # 持仓盈亏
('Balance', c_double), # 期货结算准备金
('Available', c_double), # 可用资金
('WithdrawQuota', c_double), # 可取资金
('Reserve', c_double), # 基本准备金
('TradingDay', c_char * 9), # 交易日
('SettlementID', c_int), # 结算编号
('Credit', c_double), # 信用额度
('Mortgage', c_double), # 质押金额
('ExchangeMargin', c_double), # 交易所保证金
('DeliveryMargin', c_double), # 投资者交割保证金
('ExchangeDeliveryMargin', c_double), # 交易所交割保证金
('ReserveBalance', c_double), # 保底期货结算准备金
('CurrencyID', c_char * 4), # 币种代码
('PreFundMortgageIn', c_double), # 上次货币质入金额
('PreFundMortgageOut', c_double), # 上次货币质出金额
('FundMortgageIn', c_double), # 货币质入金额
('FundMortgageOut', c_double), # 货币质出金额
('FundMortgageAvailable', c_double), # 货币质押余额
('MortgageableFund', c_double), # 可质押货币金额
('SpecProductMargin', c_double), # 特殊产品占用保证金
('SpecProductFrozenMargin', c_double), # 特殊产品冻结保证金
('SpecProductCommission', c_double), # 特殊产品手续费
('SpecProductFrozenCommission', c_double), # 特殊产品冻结手续费
('SpecProductPositionProfit', c_double), # 特殊产品持仓盈亏
('SpecProductCloseProfit', c_double), # 特殊产品平仓盈亏
('SpecProductPositionProfitByAlg', c_double), # 根据持仓盈亏算法计算的特殊产品持仓盈亏
('SpecProductExchangeMargin', c_double), # 特殊产品交易所保证金
('BizType', c_char * 1), # 业务类型
('FrozenSwap', c_double), # 延时换汇冻结金额
('RemainSwap', c_double) # 剩余换汇额度
]
class InvestorPositionField(BaseField):
"""投资者持仓"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('PosiDirection', c_char * 1), # 持仓多空方向
('HedgeFlag', c_char * 1), # 投机套保标志
('PositionDate', c_char * 1), # 持仓日期
('YdPosition', c_int), # 上日持仓
('Position', c_int), # 今日持仓
('LongFrozen', c_int), # 多头冻结
('ShortFrozen', c_int), # 空头冻结
('LongFrozenAmount', c_double), # 开仓冻结金额
('ShortFrozenAmount', c_double), # 开仓冻结金额
('OpenVolume', c_int), # 开仓量
('CloseVolume', c_int), # 平仓量
('OpenAmount', c_double), # 开仓金额
('CloseAmount', c_double), # 平仓金额
('PositionCost', c_double), # 持仓成本
('PreMargin', c_double), # 上次占用的保证金
('UseMargin', c_double), # 占用的保证金
('FrozenMargin', c_double), # 冻结的保证金
('FrozenCash', c_double), # 冻结的资金
('FrozenCommission', c_double), # 冻结的手续费
('CashIn', c_double), # 资金差额
('Commission', c_double), # 手续费
('CloseProfit', c_double), # 平仓盈亏
('PositionProfit', c_double), # 持仓盈亏
('PreSettlementPrice', c_double), # 上次结算价
('SettlementPrice', c_double), # 本次结算价
('TradingDay', c_char * 9), # 交易日
('SettlementID', c_int), # 结算编号
('OpenCost', c_double), # 开仓成本
('ExchangeMargin', c_double), # 交易所保证金
('CombPosition', c_int), # 组合成交形成的持仓
('CombLongFrozen', c_int), # 组合多头冻结
('CombShortFrozen', c_int), # 组合空头冻结
('CloseProfitByDate', c_double), # 逐日盯市平仓盈亏
('CloseProfitByTrade', c_double), # 逐笔对冲平仓盈亏
('TodayPosition', c_int), # 今日持仓
('MarginRateByMoney', c_double), # 保证金率
('MarginRateByVolume', c_double), # 保证金率(按手数)
('StrikeFrozen', c_int), # 执行冻结
('StrikeFrozenAmount', c_double), # 执行冻结金额
('AbandonFrozen', c_int), # 放弃执行冻结
('ExchangeID', c_char * 9), # 交易所代码
('YdStrikeFrozen', c_int), # 执行冻结的昨仓
('InvestUnitID', c_char * 17), # 投资单元代码
('PositionCostOffset', c_double) # 大商所持仓成本差值,只有大商所使用
]
class InstrumentMarginRateField(BaseField):
"""合约保证金率"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('InvestorRange', c_char * 1), # 投资者范围
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('HedgeFlag', c_char * 1), # 投机套保标志
('LongMarginRatioByMoney', c_double), # 多头保证金率
('LongMarginRatioByVolume', c_double), # 多头保证金费
('ShortMarginRatioByMoney', c_double), # 空头保证金率
('ShortMarginRatioByVolume', c_double), # 空头保证金费
('IsRelative', c_int), # 是否相对交易所收取
('ExchangeID', c_char * 9), # 交易所代码
('InvestUnitID', c_char * 17) # 投资单元代码
]
class InstrumentCommissionRateField(BaseField):
"""合约手续费率"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('InvestorRange', c_char * 1), # 投资者范围
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('OpenRatioByMoney', c_double), # 开仓手续费率
('OpenRatioByVolume', c_double), # 开仓手续费
('CloseRatioByMoney', c_double), # 平仓手续费率
('CloseRatioByVolume', c_double), # 平仓手续费
('CloseTodayRatioByMoney', c_double), # 平今手续费率
('CloseTodayRatioByVolume', c_double), # 平今手续费
('ExchangeID', c_char * 9), # 交易所代码
('BizType', c_char * 1), # 业务类型
('InvestUnitID', c_char * 17) # 投资单元代码
]
class DepthMarketDataField(BaseField):
"""深度行情"""
_fields_ = [
('TradingDay', c_char * 9), # ///交易日
('InstrumentID', c_char * 31), # 合约代码
('ExchangeID', c_char * 9), # 交易所代码
('ExchangeInstID', c_char * 31), # 合约在交易所的代码
('LastPrice', c_double), # 最新价
('PreSettlementPrice', c_double), # 上次结算价
('PreClosePrice', c_double), # 昨收盘
('PreOpenInterest', c_double), # 昨持仓量
('OpenPrice', c_double), # 今开盘
('HighestPrice', c_double), # 最高价
('LowestPrice', c_double), # 最低价
('Volume', c_int), # 数量
('Turnover', c_double), # 成交金额
('OpenInterest', c_double), # 持仓量
('ClosePrice', c_double), # 今收盘
('SettlementPrice', c_double), # 本次结算价
('UpperLimitPrice', c_double), # 涨停板价
('LowerLimitPrice', c_double), # 跌停板价
('PreDelta', c_double), # 昨虚实度
('CurrDelta', c_double), # 今虚实度
('UpdateTime', c_char * 9), # 最后修改时间
('UpdateMillisec', c_int), # 最后修改毫秒
('BidPrice1', c_double), # 申买价一
('BidVolume1', c_int), # 申买量一
('AskPrice1', c_double), # 申卖价一
('AskVolume1', c_int), # 申卖量一
('BidPrice2', c_double), # 申买价二
('BidVolume2', c_int), # 申买量二
('AskPrice2', c_double), # 申卖价二
('AskVolume2', c_int), # 申卖量二
('BidPrice3', c_double), # 申买价三
('BidVolume3', c_int), # 申买量三
('AskPrice3', c_double), # 申卖价三
('AskVolume3', c_int), # 申卖量三
('BidPrice4', c_double), # 申买价四
('BidVolume4', c_int), # 申买量四
('AskPrice4', c_double), # 申卖价四
('AskVolume4', c_int), # 申卖量四
('BidPrice5', c_double), # 申买价五
('BidVolume5', c_int), # 申买量五
('AskPrice5', c_double), # 申卖价五
('AskVolume5', c_int), # 申卖量五
('AveragePrice', c_double), # 当日均价
('ActionDay', c_char * 9) # 业务日期
]
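# Sketch (not part of the original file): reading the top of book from a filled
# DepthMarketDataField (the dict form used by TraderApi.get_price exposes the
# same field names as keys).
def _top_of_book(md):
    """Return (bid1, ask1, mid) from the level-1 quotes."""
    bid, ask = md.BidPrice1, md.AskPrice1
    return bid, ask, (bid + ask) / 2.0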
class InstrumentTradingRightField(BaseField):
"""投资者合约交易权限"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('InvestorRange', c_char * 1), # 投资者范围
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('TradingRight', c_char * 1) # 交易权限
]
class BrokerUserField(BaseField):
"""经纪公司用户"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('UserID', c_char * 16), # 用户代码
('UserName', c_char * 81), # 用户名称
('UserType', c_char * 1), # 用户类型
('IsActive', c_int), # 是否活跃
('IsUsingOTP', c_int), # 是否使用令牌
('IsAuthForce', c_int) # 是否强制终端认证
]
class BrokerUserPasswordField(BaseField):
"""经纪公司用户口令"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('UserID', c_char * 16), # 用户代码
('Password', c_char * 41), # 密码
('LastUpdateTime', c_char * 17), # 上次修改时间
('LastLoginTime', c_char * 17), # 上次登陆时间
('ExpireDate', c_char * 9), # 密码过期时间
('WeakExpireDate', c_char * 9) # 弱密码过期时间
]
class BrokerUserFunctionField(BaseField):
"""经纪公司用户功能权限"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('UserID', c_char * 16), # 用户代码
('BrokerFunctionCode', c_char * 1) # 经纪公司功能代码
]
class TraderOfferField(BaseField):
"""交易所交易员报盘机"""
_fields_ = [
('ExchangeID', c_char * 9), # ///交易所代码
('TraderID', c_char * 21), # 交易所交易员代码
('ParticipantID', c_char * 11), # 会员代码
('Password', c_char * 41), # 密码
('InstallID', c_int), # 安装编号
('OrderLocalID', c_char * 13), # 本地报单编号
('TraderConnectStatus', c_char * 1), # 交易所交易员连接状态
('ConnectRequestDate', c_char * 9), # 发出连接请求的日期
('ConnectRequestTime', c_char * 9), # 发出连接请求的时间
('LastReportDate', c_char * 9), # 上次报告日期
('LastReportTime', c_char * 9), # 上次报告时间
('ConnectDate', c_char * 9), # 完成连接日期
('ConnectTime', c_char * 9), # 完成连接时间
('StartDate', c_char * 9), # 启动日期
('StartTime', c_char * 9), # 启动时间
('TradingDay', c_char * 9), # 交易日
('BrokerID', c_char * 11), # 经纪公司代码
('MaxTradeID', c_char * 21), # 本席位最大成交编号
('MaxOrderMessageReference', c_char * 7) # 本席位最大报单备拷
]
class SettlementInfoField(BaseField):
"""投资者结算结果"""
_fields_ = [
('TradingDay', c_char * 9), # ///交易日
('SettlementID', c_int), # 结算编号
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('SequenceNo', c_int), # 序号
('Content', c_char * 501), # 消息正文
('AccountID', c_char * 13), # 投资者帐号
('CurrencyID', c_char * 4) # 币种代码
]
class InstrumentMarginRateAdjustField(BaseField):
"""合约保证金率调整"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('InvestorRange', c_char * 1), # 投资者范围
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('HedgeFlag', c_char * 1), # 投机套保标志
('LongMarginRatioByMoney', c_double), # 多头保证金率
('LongMarginRatioByVolume', c_double), # 多头保证金费
('ShortMarginRatioByMoney', c_double), # 空头保证金率
('ShortMarginRatioByVolume', c_double), # 空头保证金费
('IsRelative', c_int) # 是否相对交易所收取
]
class ExchangeMarginRateField(BaseField):
"""交易所保证金率"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InstrumentID', c_char * 31), # 合约代码
('HedgeFlag', c_char * 1), # 投机套保标志
('LongMarginRatioByMoney', c_double), # 多头保证金率
('LongMarginRatioByVolume', c_double), # 多头保证金费
('ShortMarginRatioByMoney', c_double), # 空头保证金率
('ShortMarginRatioByVolume', c_double), # 空头保证金费
('ExchangeID', c_char * 9) # 交易所代码
]
class ExchangeMarginRateAdjustField(BaseField):
"""交易所保证金率调整"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InstrumentID', c_char * 31), # 合约代码
('HedgeFlag', c_char * 1), # 投机套保标志
('LongMarginRatioByMoney', c_double), # 跟随交易所投资者多头保证金率
('LongMarginRatioByVolume', c_double), # 跟随交易所投资者多头保证金费
('ShortMarginRatioByMoney', c_double), # 跟随交易所投资者空头保证金率
('ShortMarginRatioByVolume', c_double), # 跟随交易所投资者空头保证金费
('ExchLongMarginRatioByMoney', c_double), # 交易所多头保证金率
('ExchLongMarginRatioByVolume', c_double), # 交易所多头保证金费
('ExchShortMarginRatioByMoney', c_double), # 交易所空头保证金率
('ExchShortMarginRatioByVolume', c_double), # 交易所空头保证金费
('NoLongMarginRatioByMoney', c_double), # 不跟随交易所投资者多头保证金率
('NoLongMarginRatioByVolume', c_double), # 不跟随交易所投资者多头保证金费
('NoShortMarginRatioByMoney', c_double), # 不跟随交易所投资者空头保证金率
('NoShortMarginRatioByVolume', c_double) # 不跟随交易所投资者空头保证金费
]
class ExchangeRateField(BaseField):
"""汇率"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('FromCurrencyID', c_char * 4), # 源币种
('FromCurrencyUnit', c_double), # 源币种单位数量
('ToCurrencyID', c_char * 4), # 目标币种
('ExchangeRate', c_double) # 汇率
]
class SettlementRefField(BaseField):
"""结算引用"""
_fields_ = [
('TradingDay', c_char * 9), # ///交易日
('SettlementID', c_int) # 结算编号
]
class CurrentTimeField(BaseField):
"""当前时间"""
_fields_ = [
('CurrDate', c_char * 9), # ///当前日期
('CurrTime', c_char * 9), # 当前时间
('CurrMillisec', c_int), # 当前时间(毫秒)
('ActionDay', c_char * 9) # 业务日期
]
class CommPhaseField(BaseField):
"""通讯阶段"""
_fields_ = [
('TradingDay', c_char * 9), # ///交易日
('CommPhaseNo', c_short), # 通讯时段编号
('SystemID', c_char * 21) # 系统编号
]
class LoginInfoField(BaseField):
"""登录信息"""
_fields_ = [
('FrontID', c_int), # ///前置编号
('SessionID', c_int), # 会话编号
('BrokerID', c_char * 11), # 经纪公司代码
('UserID', c_char * 16), # 用户代码
('LoginDate', c_char * 9), # 登录日期
('LoginTime', c_char * 9), # 登录时间
('IPAddress', c_char * 16), # IP地址
('UserProductInfo', c_char * 11), # 用户端产品信息
('InterfaceProductInfo', c_char * 11), # 接口端产品信息
('ProtocolInfo', c_char * 11), # 协议信息
('SystemName', c_char * 41), # 系统名称
('PasswordDeprecated', c_char * 41), # 密码,已弃用
('MaxOrderRef', c_char * 13), # 最大报单引用
('SHFETime', c_char * 9), # 上期所时间
('DCETime', c_char * 9), # 大商所时间
('CZCETime', c_char * 9), # 郑商所时间
('FFEXTime', c_char * 9), # 中金所时间
('MacAddress', c_char * 21), # Mac地址
('OneTimePassword', c_char * 41), # 动态密码
('INETime', c_char * 9), # 能源中心时间
('IsQryControl', c_int), # 查询时是否需要流控
('LoginRemark', c_char * 36), # 登录备注
('Password', c_char * 41) # 密码
]
class LogoutAllField(BaseField):
"""登录信息"""
_fields_ = [
('FrontID', c_int), # ///前置编号
('SessionID', c_int), # 会话编号
('SystemName', c_char * 41) # 系统名称
]
class FrontStatusField(BaseField):
"""前置状态"""
_fields_ = [
('FrontID', c_int), # ///前置编号
('LastReportDate', c_char * 9), # 上次报告日期
('LastReportTime', c_char * 9), # 上次报告时间
('IsActive', c_int) # 是否活跃
]
class UserPasswordUpdateField(BaseField):
"""用户口令变更"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('UserID', c_char * 16), # 用户代码
('OldPassword', c_char * 41), # 原来的口令
('NewPassword', c_char * 41) # 新的口令
]
class InputOrderField(BaseField):
"""输入报单"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('InstrumentID', c_char * 31), # 合约代码
('OrderRef', c_char * 13), # 报单引用
('UserID', c_char * 16), # 用户代码
('OrderPriceType', c_char * 1), # 报单价格条件
('Direction', c_char * 1), # 买卖方向
('CombOffsetFlag', c_char * 5), # 组合开平标志
('CombHedgeFlag', c_char * 5), # 组合投机套保标志
('LimitPrice', c_double), # 价格
('VolumeTotalOriginal', c_int), # 数量
('TimeCondition', c_char * 1), # 有效期类型
('GTDDate', c_char * 9), # GTD日期
('VolumeCondition', c_char * 1), # 成交量类型
('MinVolume', c_int), # 最小成交量
('ContingentCondition', c_char * 1), # 触发条件
('StopPrice', c_double), # 止损价
('ForceCloseReason', c_char * 1), # 强平原因
('IsAutoSuspend', c_int), # 自动挂起标志
('BusinessUnit', c_char * 21), # 业务单元
('RequestID', c_int), # 请求编号
('UserForceClose', c_int), # 用户强评标志
('IsSwapOrder', c_int), # 互换单标志
('ExchangeID', c_char * 9), # 交易所代码
('InvestUnitID', c_char * 17), # 投资单元代码
('AccountID', c_char * 13), # 资金账号
('CurrencyID', c_char * 4), # 币种代码
('ClientID', c_char * 11), # 交易编码
('IPAddress', c_char * 16), # IP地址
('MacAddress', c_char * 21) # Mac地址
]
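# Sketch (not part of the original file): the core fields a plain limit order
# needs. Values are illustrative placeholders; the single-character flags follow
# the CTP convention (b'0' buy / open) also encoded in AlgoPlus.CTP.ApiConst.
def _example_input_order():
    req = InputOrderField()
    req.InstrumentID = b'rb2010'   # instrument code
    req.Direction = b'0'           # buy
    req.CombOffsetFlag = b'0'      # open
    req.LimitPrice = 3500.0        # limit price
    req.VolumeTotalOriginal = 1    # volume
    return req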
class OrderField(BaseField):
"""报单"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('InstrumentID', c_char * 31), # 合约代码
('OrderRef', c_char * 13), # 报单引用
('UserID', c_char * 16), # 用户代码
('OrderPriceType', c_char * 1), # 报单价格条件
('Direction', c_char * 1), # 买卖方向
('CombOffsetFlag', c_char * 5), # 组合开平标志
('CombHedgeFlag', c_char * 5), # 组合投机套保标志
('LimitPrice', c_double), # 价格
('VolumeTotalOriginal', c_int), # 数量
('TimeCondition', c_char * 1), # 有效期类型
('GTDDate', c_char * 9), # GTD日期
('VolumeCondition', c_char * 1), # 成交量类型
('MinVolume', c_int), # 最小成交量
('ContingentCondition', c_char * 1), # 触发条件
('StopPrice', c_double), # 止损价
('ForceCloseReason', c_char * 1), # 强平原因
('IsAutoSuspend', c_int), # 自动挂起标志
('BusinessUnit', c_char * 21), # 业务单元
('RequestID', c_int), # 请求编号
('OrderLocalID', c_char * 13), # 本地报单编号
('ExchangeID', c_char * 9), # 交易所代码
('ParticipantID', c_char * 11), # 会员代码
('ClientID', c_char * 11), # 客户代码
('ExchangeInstID', c_char * 31), # 合约在交易所的代码
('TraderID', c_char * 21), # 交易所交易员代码
('InstallID', c_int), # 安装编号
('OrderSubmitStatus', c_char * 1), # 报单提交状态
('NotifySequence', c_int), # 报单提示序号
('TradingDay', c_char * 9), # 交易日
('SettlementID', c_int), # 结算编号
('OrderSysID', c_char * 21), # 报单编号
('OrderSource', c_char * 1), # 报单来源
('OrderStatus', c_char * 1), # 报单状态
('OrderType', c_char * 1), # 报单类型
('VolumeTraded', c_int), # 今成交数量
('VolumeTotal', c_int), # 剩余数量
('InsertDate', c_char * 9), # 报单日期
('InsertTime', c_char * 9), # 委托时间
('ActiveTime', c_char * 9), # 激活时间
('SuspendTime', c_char * 9), # 挂起时间
('UpdateTime', c_char * 9), # 最后修改时间
('CancelTime', c_char * 9), # 撤销时间
('ActiveTraderID', c_char * 21), # 最后修改交易所交易员代码
('ClearingPartID', c_char * 11), # 结算会员编号
('SequenceNo', c_int), # 序号
('FrontID', c_int), # 前置编号
('SessionID', c_int), # 会话编号
('UserProductInfo', c_char * 11), # 用户端产品信息
('StatusMsg', c_char * 81), # 状态信息
('UserForceClose', c_int), # 用户强评标志
('ActiveUserID', c_char * 16), # 操作用户代码
('BrokerOrderSeq', c_int), # 经纪公司报单编号
('RelativeOrderSysID', c_char * 21), # 相关报单
('ZCETotalTradedVolume', c_int), # 郑商所成交数量
('IsSwapOrder', c_int), # 互换单标志
('BranchID', c_char * 9), # 营业部编号
('InvestUnitID', c_char * 17), # 投资单元代码
('AccountID', c_char * 13), # 资金账号
('CurrencyID', c_char * 4), # 币种代码
('IPAddress', c_char * 16), # IP地址
('MacAddress', c_char * 21) # Mac地址
]
class ExchangeOrderField(BaseField):
"""交易所报单"""
_fields_ = [
('OrderPriceType', c_char * 1), # ///报单价格条件
('Direction', c_char * 1), # 买卖方向
('CombOffsetFlag', c_char * 5), # 组合开平标志
('CombHedgeFlag', c_char * 5), # 组合投机套保标志
('LimitPrice', c_double), # 价格
('VolumeTotalOriginal', c_int), # 数量
('TimeCondition', c_char * 1), # 有效期类型
('GTDDate', c_char * 9), # GTD日期
('VolumeCondition', c_char * 1), # 成交量类型
('MinVolume', c_int), # 最小成交量
('ContingentCondition', c_char * 1), # 触发条件
('StopPrice', c_double), # 止损价
('ForceCloseReason', c_char * 1), # 强平原因
('IsAutoSuspend', c_int), # 自动挂起标志
('BusinessUnit', c_char * 21), # 业务单元
('RequestID', c_int), # 请求编号
('OrderLocalID', c_char * 13), # 本地报单编号
('ExchangeID', c_char * 9), # 交易所代码
('ParticipantID', c_char * 11), # 会员代码
('ClientID', c_char * 11), # 客户代码
('ExchangeInstID', c_char * 31), # 合约在交易所的代码
('TraderID', c_char * 21), # 交易所交易员代码
('InstallID', c_int), # 安装编号
('OrderSubmitStatus', c_char * 1), # 报单提交状态
('NotifySequence', c_int), # 报单提示序号
('TradingDay', c_char * 9), # 交易日
('SettlementID', c_int), # 结算编号
('OrderSysID', c_char * 21), # 报单编号
('OrderSource', c_char * 1), # 报单来源
('OrderStatus', c_char * 1), # 报单状态
('OrderType', c_char * 1), # 报单类型
('VolumeTraded', c_int), # 今成交数量
('VolumeTotal', c_int), # 剩余数量
('InsertDate', c_char * 9), # 报单日期
('InsertTime', c_char * 9), # 委托时间
('ActiveTime', c_char * 9), # 激活时间
('SuspendTime', c_char * 9), # 挂起时间
('UpdateTime', c_char * 9), # 最后修改时间
('CancelTime', c_char * 9), # 撤销时间
('ActiveTraderID', c_char * 21), # 最后修改交易所交易员代码
('ClearingPartID', c_char * 11), # 结算会员编号
('SequenceNo', c_int), # 序号
('BranchID', c_char * 9), # 营业部编号
('IPAddress', c_char * 16), # IP地址
('MacAddress', c_char * 21) # Mac地址
]
class ExchangeOrderInsertErrorField(BaseField):
"""交易所报单插入失败"""
_fields_ = [
('ExchangeID', c_char * 9), # ///交易所代码
('ParticipantID', c_char * 11), # 会员代码
('TraderID', c_char * 21), # 交易所交易员代码
('InstallID', c_int), # 安装编号
('OrderLocalID', c_char * 13), # 本地报单编号
('ErrorID', c_int), # 错误代码
('ErrorMsg', c_char * 81) # 错误信息
]
class InputOrderActionField(BaseField):
"""输入报单操作"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('OrderActionRef', c_int), # 报单操作引用
('OrderRef', c_char * 13), # 报单引用
('RequestID', c_int), # 请求编号
('FrontID', c_int), # 前置编号
('SessionID', c_int), # 会话编号
('ExchangeID', c_char * 9), # 交易所代码
('OrderSysID', c_char * 21), # 报单编号
('ActionFlag', c_char * 1), # 操作标志
('LimitPrice', c_double), # 价格
('VolumeChange', c_int), # 数量变化
('UserID', c_char * 16), # 用户代码
('InstrumentID', c_char * 31), # 合约代码
('InvestUnitID', c_char * 17), # 投资单元代码
('IPAddress', c_char * 16), # IP地址
('MacAddress', c_char * 21) # Mac地址
]
class OrderActionField(BaseField):
"""报单操作"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('OrderActionRef', c_int), # 报单操作引用
('OrderRef', c_char * 13), # 报单引用
('RequestID', c_int), # 请求编号
('FrontID', c_int), # 前置编号
('SessionID', c_int), # 会话编号
('ExchangeID', c_char * 9), # 交易所代码
('OrderSysID', c_char * 21), # 报单编号
('ActionFlag', c_char * 1), # 操作标志
('LimitPrice', c_double), # 价格
('VolumeChange', c_int), # 数量变化
('ActionDate', c_char * 9), # 操作日期
('ActionTime', c_char * 9), # 操作时间
('TraderID', c_char * 21), # 交易所交易员代码
('InstallID', c_int), # 安装编号
('OrderLocalID', c_char * 13), # 本地报单编号
('ActionLocalID', c_char * 13), # 操作本地编号
('ParticipantID', c_char * 11), # 会员代码
('ClientID', c_char * 11), # 客户代码
('BusinessUnit', c_char * 21), # 业务单元
('OrderActionStatus', c_char * 1), # 报单操作状态
('UserID', c_char * 16), # 用户代码
('StatusMsg', c_char * 81), # 状态信息
('InstrumentID', c_char * 31), # 合约代码
('BranchID', c_char * 9), # 营业部编号
('InvestUnitID', c_char * 17), # 投资单元代码
('IPAddress', c_char * 16), # IP地址
('MacAddress', c_char * 21) # Mac地址
]
class ExchangeOrderActionField(BaseField):
"""交易所报单操作"""
_fields_ = [
('ExchangeID', c_char * 9), # ///交易所代码
('OrderSysID', c_char * 21), # 报单编号
('ActionFlag', c_char * 1), # 操作标志
('LimitPrice', c_double), # 价格
('VolumeChange', c_int), # 数量变化
('ActionDate', c_char * 9), # 操作日期
('ActionTime', c_char * 9), # 操作时间
('TraderID', c_char * 21), # 交易所交易员代码
('InstallID', c_int), # 安装编号
('OrderLocalID', c_char * 13), # 本地报单编号
('ActionLocalID', c_char * 13), # 操作本地编号
('ParticipantID', c_char * 11), # 会员代码
('ClientID', c_char * 11), # 客户代码
('BusinessUnit', c_char * 21), # 业务单元
('OrderActionStatus', c_char * 1), # 报单操作状态
('UserID', c_char * 16), # 用户代码
('BranchID', c_char * 9), # 营业部编号
('IPAddress', c_char * 16), # IP地址
('MacAddress', c_char * 21) # Mac地址
]
class ExchangeOrderActionErrorField(BaseField):
"""交易所报单操作失败"""
_fields_ = [
('ExchangeID', c_char * 9), # ///交易所代码
('OrderSysID', c_char * 21), # 报单编号
('TraderID', c_char * 21), # 交易所交易员代码
('InstallID', c_int), # 安装编号
('OrderLocalID', c_char * 13), # 本地报单编号
('ActionLocalID', c_char * 13), # 操作本地编号
('ErrorID', c_int), # 错误代码
('ErrorMsg', c_char * 81) # 错误信息
]
class ExchangeTradeField(BaseField):
"""交易所成交"""
_fields_ = [
('ExchangeID', c_char * 9), # ///交易所代码
('TradeID', c_char * 21), # 成交编号
('Direction', c_char * 1), # 买卖方向
('OrderSysID', c_char * 21), # 报单编号
('ParticipantID', c_char * 11), # 会员代码
('ClientID', c_char * 11), # 客户代码
('TradingRole', c_char * 1), # 交易角色
('ExchangeInstID', c_char * 31), # 合约在交易所的代码
('OffsetFlag', c_char * 1), # 开平标志
('HedgeFlag', c_char * 1), # 投机套保标志
('Price', c_double), # 价格
('Volume', c_int), # 数量
('TradeDate', c_char * 9), # 成交时期
('TradeTime', c_char * 9), # 成交时间
('TradeType', c_char * 1), # 成交类型
('PriceSource', c_char * 1), # 成交价来源
('TraderID', c_char * 21), # 交易所交易员代码
('OrderLocalID', c_char * 13), # 本地报单编号
('ClearingPartID', c_char * 11), # 结算会员编号
('BusinessUnit', c_char * 21), # 业务单元
('SequenceNo', c_int), # 序号
('TradeSource', c_char * 1) # 成交来源
]
class TradeField(BaseField):
"""成交"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('InstrumentID', c_char * 31), # 合约代码
('OrderRef', c_char * 13), # 报单引用
('UserID', c_char * 16), # 用户代码
('ExchangeID', c_char * 9), # 交易所代码
('TradeID', c_char * 21), # 成交编号
('Direction', c_char * 1), # 买卖方向
('OrderSysID', c_char * 21), # 报单编号
('ParticipantID', c_char * 11), # 会员代码
('ClientID', c_char * 11), # 客户代码
('TradingRole', c_char * 1), # 交易角色
('ExchangeInstID', c_char * 31), # 合约在交易所的代码
('OffsetFlag', c_char * 1), # 开平标志
('HedgeFlag', c_char * 1), # 投机套保标志
('Price', c_double), # 价格
('Volume', c_int), # 数量
('TradeDate', c_char * 9), # 成交时期
('TradeTime', c_char * 9), # 成交时间
('TradeType', c_char * 1), # 成交类型
('PriceSource', c_char * 1), # 成交价来源
('TraderID', c_char * 21), # 交易所交易员代码
('OrderLocalID', c_char * 13), # 本地报单编号
('ClearingPartID', c_char * 11), # 结算会员编号
('BusinessUnit', c_char * 21), # 业务单元
('SequenceNo', c_int), # 序号
('TradingDay', c_char * 9), # 交易日
('SettlementID', c_int), # 结算编号
('BrokerOrderSeq', c_int), # 经纪公司报单编号
('TradeSource', c_char * 1), # 成交来源
('InvestUnitID', c_char * 17) # 投资单元代码
]
class UserSessionField(BaseField):
"""用户会话"""
_fields_ = [
('FrontID', c_int), # ///前置编号
('SessionID', c_int), # 会话编号
('BrokerID', c_char * 11), # 经纪公司代码
('UserID', c_char * 16), # 用户代码
('LoginDate', c_char * 9), # 登录日期
('LoginTime', c_char * 9), # 登录时间
('IPAddress', c_char * 16), # IP地址
('UserProductInfo', c_char * 11), # 用户端产品信息
('InterfaceProductInfo', c_char * 11), # 接口端产品信息
('ProtocolInfo', c_char * 11), # 协议信息
('MacAddress', c_char * 21), # Mac地址
('LoginRemark', c_char * 36) # 登录备注
]
class QueryMaxOrderVolumeField(BaseField):
"""查询最大报单数量"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('InstrumentID', c_char * 31), # 合约代码
('Direction', c_char * 1), # 买卖方向
('OffsetFlag', c_char * 1), # 开平标志
('HedgeFlag', c_char * 1), # 投机套保标志
('MaxVolume', c_int), # 最大允许报单数量
('ExchangeID', c_char * 9), # 交易所代码
('InvestUnitID', c_char * 17) # 投资单元代码
]
class SettlementInfoConfirmField(BaseField):
"""投资者结算结果确认信息"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('ConfirmDate', c_char * 9), # 确认日期
('ConfirmTime', c_char * 9), # 确认时间
('SettlementID', c_int), # 结算编号
('AccountID', c_char * 13), # 投资者帐号
('CurrencyID', c_char * 4) # 币种代码
]
class SyncDepositField(BaseField):
"""出入金同步"""
_fields_ = [
('DepositSeqNo', c_char * 15), # ///出入金流水号
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('Deposit', c_double), # 入金金额
('IsForce', c_int), # 是否强制进行
('CurrencyID', c_char * 4) # 币种代码
]
class SyncFundMortgageField(BaseField):
"""货币质押同步"""
_fields_ = [
('MortgageSeqNo', c_char * 15), # ///货币质押流水号
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('FromCurrencyID', c_char * 4), # 源币种
('MortgageAmount', c_double), # 质押金额
('ToCurrencyID', c_char * 4) # 目标币种
]
class BrokerSyncField(BaseField):
"""经纪公司同步"""
_fields_ = [
('BrokerID', c_char * 11) # ///经纪公司代码
]
class SyncingInvestorField(BaseField):
"""正在同步中的投资者"""
_fields_ = [
('InvestorID', c_char * 13), # ///投资者代码
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorGroupID', c_char * 13), # 投资者分组代码
('InvestorName', c_char * 81), # 投资者名称
('IdentifiedCardType', c_char * 1), # 证件类型
('IdentifiedCardNo', c_char * 51), # 证件号码
('IsActive', c_int), # 是否活跃
('Telephone', c_char * 41), # 联系电话
('Address', c_char * 101), # 通讯地址
('OpenDate', c_char * 9), # 开户日期
('Mobile', c_char * 41), # 手机
('CommModelID', c_char * 13), # 手续费率模板代码
('MarginModelID', c_char * 13) # 保证金率模板代码
]
class SyncingTradingCodeField(BaseField):
"""正在同步中的交易代码"""
_fields_ = [
('InvestorID', c_char * 13), # ///投资者代码
('BrokerID', c_char * 11), # 经纪公司代码
('ExchangeID', c_char * 9), # 交易所代码
('ClientID', c_char * 11), # 客户代码
('IsActive', c_int), # 是否活跃
('ClientIDType', c_char * 1) # 交易编码类型
]
class SyncingInvestorGroupField(BaseField):
"""正在同步中的投资者分组"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorGroupID', c_char * 13), # 投资者分组代码
('InvestorGroupName', c_char * 41) # 投资者分组名称
]
class SyncingTradingAccountField(BaseField):
"""正在同步中的交易账号"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('AccountID', c_char * 13), # 投资者帐号
('PreMortgage', c_double), # 上次质押金额
('PreCredit', c_double), # 上次信用额度
('PreDeposit', c_double), # 上次存款额
('PreBalance', c_double), # 上次结算准备金
('PreMargin', c_double), # 上次占用的保证金
('InterestBase', c_double), # 利息基数
('Interest', c_double), # 利息收入
('Deposit', c_double), # 入金金额
('Withdraw', c_double), # 出金金额
('FrozenMargin', c_double), # 冻结的保证金
('FrozenCash', c_double), # 冻结的资金
('FrozenCommission', c_double), # 冻结的手续费
('CurrMargin', c_double), # 当前保证金总额
('CashIn', c_double), # 资金差额
('Commission', c_double), # 手续费
('CloseProfit', c_double), # 平仓盈亏
('PositionProfit', c_double), # 持仓盈亏
('Balance', c_double), # 期货结算准备金
('Available', c_double), # 可用资金
('WithdrawQuota', c_double), # 可取资金
('Reserve', c_double), # 基本准备金
('TradingDay', c_char * 9), # 交易日
('SettlementID', c_int), # 结算编号
('Credit', c_double), # 信用额度
('Mortgage', c_double), # 质押金额
('ExchangeMargin', c_double), # 交易所保证金
('DeliveryMargin', c_double), # 投资者交割保证金
('ExchangeDeliveryMargin', c_double), # 交易所交割保证金
('ReserveBalance', c_double), # 保底期货结算准备金
('CurrencyID', c_char * 4), # 币种代码
('PreFundMortgageIn', c_double), # 上次货币质入金额
('PreFundMortgageOut', c_double), # 上次货币质出金额
('FundMortgageIn', c_double), # 货币质入金额
('FundMortgageOut', c_double), # 货币质出金额
('FundMortgageAvailable', c_double), # 货币质押余额
('MortgageableFund', c_double), # 可质押货币金额
('SpecProductMargin', c_double), # 特殊产品占用保证金
('SpecProductFrozenMargin', c_double), # 特殊产品冻结保证金
('SpecProductCommission', c_double), # 特殊产品手续费
('SpecProductFrozenCommission', c_double), # 特殊产品冻结手续费
('SpecProductPositionProfit', c_double), # 特殊产品持仓盈亏
('SpecProductCloseProfit', c_double), # 特殊产品平仓盈亏
('SpecProductPositionProfitByAlg', c_double), # 根据持仓盈亏算法计算的特殊产品持仓盈亏
('SpecProductExchangeMargin', c_double), # 特殊产品交易所保证金
('FrozenSwap', c_double), # 延时换汇冻结金额
('RemainSwap', c_double) # 剩余换汇额度
]
class SyncingInvestorPositionField(BaseField):
"""正在同步中的投资者持仓"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('PosiDirection', c_char * 1), # 持仓多空方向
('HedgeFlag', c_char * 1), # 投机套保标志
('PositionDate', c_char * 1), # 持仓日期
('YdPosition', c_int), # 上日持仓
('Position', c_int), # 今日持仓
('LongFrozen', c_int), # 多头冻结
('ShortFrozen', c_int), # 空头冻结
('LongFrozenAmount', c_double), # 开仓冻结金额
('ShortFrozenAmount', c_double), # 开仓冻结金额
('OpenVolume', c_int), # 开仓量
('CloseVolume', c_int), # 平仓量
('OpenAmount', c_double), # 开仓金额
('CloseAmount', c_double), # 平仓金额
('PositionCost', c_double), # 持仓成本
('PreMargin', c_double), # 上次占用的保证金
('UseMargin', c_double), # 占用的保证金
('FrozenMargin', c_double), # 冻结的保证金
('FrozenCash', c_double), # 冻结的资金
('FrozenCommission', c_double), # 冻结的手续费
('CashIn', c_double), # 资金差额
('Commission', c_double), # 手续费
('CloseProfit', c_double), # 平仓盈亏
('PositionProfit', c_double), # 持仓盈亏
('PreSettlementPrice', c_double), # 上次结算价
('SettlementPrice', c_double), # 本次结算价
('TradingDay', c_char * 9), # 交易日
('SettlementID', c_int), # 结算编号
('OpenCost', c_double), # 开仓成本
('ExchangeMargin', c_double), # 交易所保证金
('CombPosition', c_int), # 组合成交形成的持仓
('CombLongFrozen', c_int), # 组合多头冻结
('CombShortFrozen', c_int), # 组合空头冻结
('CloseProfitByDate', c_double), # 逐日盯市平仓盈亏
('CloseProfitByTrade', c_double), # 逐笔对冲平仓盈亏
('TodayPosition', c_int), # 今日持仓
('MarginRateByMoney', c_double), # 保证金率
('MarginRateByVolume', c_double), # 保证金率(按手数)
('StrikeFrozen', c_int), # 执行冻结
('StrikeFrozenAmount', c_double), # 执行冻结金额
('AbandonFrozen', c_int), # 放弃执行冻结
('ExchangeID', c_char * 9), # 交易所代码
('YdStrikeFrozen', c_int), # 执行冻结的昨仓
('InvestUnitID', c_char * 17), # 投资单元代码
('PositionCostOffset', c_double) # 大商所持仓成本差值,只有大商所使用
]
class SyncingInstrumentMarginRateField(BaseField):
"""正在同步中的合约保证金率"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('InvestorRange', c_char * 1), # 投资者范围
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('HedgeFlag', c_char * 1), # 投机套保标志
('LongMarginRatioByMoney', c_double), # 多头保证金率
('LongMarginRatioByVolume', c_double), # 多头保证金费
('ShortMarginRatioByMoney', c_double), # 空头保证金率
('ShortMarginRatioByVolume', c_double), # 空头保证金费
('IsRelative', c_int) # 是否相对交易所收取
]
class SyncingInstrumentCommissionRateField(BaseField):
"""正在同步中的合约手续费率"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('InvestorRange', c_char * 1), # 投资者范围
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('OpenRatioByMoney', c_double), # 开仓手续费率
('OpenRatioByVolume', c_double), # 开仓手续费
('CloseRatioByMoney', c_double), # 平仓手续费率
('CloseRatioByVolume', c_double), # 平仓手续费
('CloseTodayRatioByMoney', c_double), # 平今手续费率
('CloseTodayRatioByVolume', c_double) # 平今手续费
]
class SyncingInstrumentTradingRightField(BaseField):
"""正在同步中的合约交易权限"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('InvestorRange', c_char * 1), # 投资者范围
('BrokerID', c_char * 11), # 经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('TradingRight', c_char * 1) # 交易权限
]
class QryOrderField(BaseField):
"""查询报单"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('InstrumentID', c_char * 31), # 合约代码
('ExchangeID', c_char * 9), # 交易所代码
('OrderSysID', c_char * 21), # 报单编号
('InsertTimeStart', c_char * 9), # 开始时间
('InsertTimeEnd', c_char * 9), # 结束时间
('InvestUnitID', c_char * 17) # 投资单元代码
]
class QryTradeField(BaseField):
"""查询成交"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('InstrumentID', c_char * 31), # 合约代码
('ExchangeID', c_char * 9), # 交易所代码
('TradeID', c_char * 21), # 成交编号
('TradeTimeStart', c_char * 9), # 开始时间
('TradeTimeEnd', c_char * 9), # 结束时间
('InvestUnitID', c_char * 17) # 投资单元代码
]
class QryInvestorPositionField(BaseField):
"""查询投资者持仓"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('InstrumentID', c_char * 31), # 合约代码
('ExchangeID', c_char * 9), # 交易所代码
('InvestUnitID', c_char * 17) # 投资单元代码
]
class QryTradingAccountField(BaseField):
"""查询资金账户"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('CurrencyID', c_char * 4), # 币种代码
('BizType', c_char * 1), # 业务类型
('AccountID', c_char * 13) # 投资者帐号
]
class QryInvestorField(BaseField):
"""查询投资者"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13) # 投资者代码
]
class QryTradingCodeField(BaseField):
"""查询交易编码"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('ExchangeID', c_char * 9), # 交易所代码
('ClientID', c_char * 11), # 客户代码
('ClientIDType', c_char * 1), # 交易编码类型
('InvestUnitID', c_char * 17) # 投资单元代码
]
class QryInvestorGroupField(BaseField):
"""查询投资者组"""
_fields_ = [
('BrokerID', c_char * 11) # ///经纪公司代码
]
class QryInstrumentMarginRateField(BaseField):
"""查询合约保证金率"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('InstrumentID', c_char * 31), # 合约代码
('HedgeFlag', c_char * 1), # 投机套保标志
('ExchangeID', c_char * 9), # 交易所代码
('InvestUnitID', c_char * 17) # 投资单元代码
]
class QryInstrumentCommissionRateField(BaseField):
"""查询手续费率"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('InstrumentID', c_char * 31), # 合约代码
('ExchangeID', c_char * 9), # 交易所代码
('InvestUnitID', c_char * 17) # 投资单元代码
]
class QryInstrumentTradingRightField(BaseField):
"""查询合约交易权限"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('InstrumentID', c_char * 31) # 合约代码
]
class QryBrokerField(BaseField):
"""查询经纪公司"""
_fields_ = [
('BrokerID', c_char * 11) # ///经纪公司代码
]
class QryTraderField(BaseField):
"""查询交易员"""
_fields_ = [
('ExchangeID', c_char * 9), # ///交易所代码
('ParticipantID', c_char * 11), # 会员代码
('TraderID', c_char * 21) # 交易所交易员代码
]
class QrySuperUserFunctionField(BaseField):
"""查询管理用户功能权限"""
_fields_ = [
('UserID', c_char * 16) # ///用户代码
]
class QryUserSessionField(BaseField):
"""查询用户会话"""
_fields_ = [
('FrontID', c_int), # ///前置编号
('SessionID', c_int), # 会话编号
('BrokerID', c_char * 11), # 经纪公司代码
('UserID', c_char * 16) # 用户代码
]
class QryPartBrokerField(BaseField):
"""查询经纪公司会员代码"""
_fields_ = [
('ExchangeID', c_char * 9), # ///交易所代码
('BrokerID', c_char * 11), # 经纪公司代码
('ParticipantID', c_char * 11) # 会员代码
]
class QryFrontStatusField(BaseField):
"""查询前置状态"""
_fields_ = [
('FrontID', c_int) # ///前置编号
]
class QryExchangeOrderField(BaseField):
"""查询交易所报单"""
_fields_ = [
('ParticipantID', c_char * 11), # ///会员代码
('ClientID', c_char * 11), # 客户代码
('ExchangeInstID', c_char * 31), # 合约在交易所的代码
('ExchangeID', c_char * 9), # 交易所代码
('TraderID', c_char * 21) # 交易所交易员代码
]
class QryOrderActionField(BaseField):
"""查询报单操作"""
_fields_ = [
('BrokerID', c_char * 11), # ///经纪公司代码
('InvestorID', c_char * 13), # 投资者代码
('ExchangeID', c_char * 9) # 交易所代码
]
class QryExchangeOrderActionField(BaseField):
"""查询交易所报单操作"""
_fields_ = [
('ParticipantID', c_char * 11), # ///会员代码
('ClientID', c_char * 11), # 客户代码
('ExchangeID', c_char * 9), # 交易所代码
('TraderID', c_char * 21) # 交易所交易员代码
]
class QrySuperUserField(BaseField):
"""查询管理用户"""
_fields_ = [
('UserID', c_char * 16) # ///用户代码
]
class QryExchangeField(BaseField):
"""查询交易所"""
_fields_ = [
('ExchangeID', c_char * 9) # ///交易所代码
]
class QryProductField(BaseField):
"""查询产品"""
_fields_ = [
('ProductID', c_char * 31), # ///产品代码
('ProductClass', c_char * 1), # 产品类型
('ExchangeID', c_char * 9) # 交易所代码
]
class QryInstrumentField(BaseField):
"""查询合约"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('ExchangeID', c_char * 9), # 交易所代码
('ExchangeInstID', c_char * 31), # 合约在交易所的代码
('ProductID', c_char * 31) # 产品代码
]
class QryDepthMarketDataField(BaseField):
"""查询行情"""
_fields_ = [
('InstrumentID', c_char * 31), # ///合约代码
('ExchangeID', c_char * 9) # 交易所代码
]
class QryBrokerUserField(BaseField):
    """Query broker user"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('UserID', c_char * 16)  # User ID
    ]

class QryBrokerUserFunctionField(BaseField):
    """Query broker user function rights"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('UserID', c_char * 16)  # User ID
    ]

class QryTraderOfferField(BaseField):
    """Query trader offer"""
    _fields_ = [
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('TraderID', c_char * 21)  # Trader ID at the exchange
    ]

class QrySyncDepositField(BaseField):
    """Query deposit/withdrawal records"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('DepositSeqNo', c_char * 15)  # Deposit/withdrawal serial number
    ]

class QrySettlementInfoField(BaseField):
    """Query investor settlement info"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('TradingDay', c_char * 9),  # Trading day
        ('AccountID', c_char * 13),  # Investor account ID
        ('CurrencyID', c_char * 4)  # Currency ID
    ]

class QryExchangeMarginRateField(BaseField):
    """Query exchange margin rate"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class QryExchangeMarginRateAdjustField(BaseField):
    """Query exchange margin rate adjustment"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('HedgeFlag', c_char * 1)  # Hedge flag
    ]

class QryExchangeRateField(BaseField):
    """Query exchange rate"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('FromCurrencyID', c_char * 4),  # Source currency ID
        ('ToCurrencyID', c_char * 4)  # Target currency ID
    ]

class QrySyncFundMortgageField(BaseField):
    """Query fund mortgage records"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('MortgageSeqNo', c_char * 15)  # Fund mortgage serial number
    ]

class QryHisOrderField(BaseField):
    """Query historical orders"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('OrderSysID', c_char * 21),  # Order system ID
        ('InsertTimeStart', c_char * 9),  # Start time
        ('InsertTimeEnd', c_char * 9),  # End time
        ('TradingDay', c_char * 9),  # Trading day
        ('SettlementID', c_int)  # Settlement ID
    ]

class OptionInstrMiniMarginField(BaseField):
    """Current minimum margin of an option instrument"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('MinMargin', c_double),  # Minimum option margin per lot
        ('ValueMethod', c_char * 1),  # Value method
        ('IsRelative', c_int)  # Whether charged following the exchange
    ]

class OptionInstrMarginAdjustField(BaseField):
    """Current margin adjust ratios of an option instrument"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('SShortMarginRatioByMoney', c_double),  # Speculative short margin adjust ratio (by amount)
        ('SShortMarginRatioByVolume', c_double),  # Speculative short margin adjust ratio (by volume)
        ('HShortMarginRatioByMoney', c_double),  # Hedging short margin adjust ratio (by amount)
        ('HShortMarginRatioByVolume', c_double),  # Hedging short margin adjust ratio (by volume)
        ('AShortMarginRatioByMoney', c_double),  # Arbitrage short margin adjust ratio (by amount)
        ('AShortMarginRatioByVolume', c_double),  # Arbitrage short margin adjust ratio (by volume)
        ('IsRelative', c_int),  # Whether charged following the exchange
        ('MShortMarginRatioByMoney', c_double),  # Market-maker short margin adjust ratio (by amount)
        ('MShortMarginRatioByVolume', c_double)  # Market-maker short margin adjust ratio (by volume)
    ]

class OptionInstrCommRateField(BaseField):
    """Current option commission details"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('OpenRatioByMoney', c_double),  # Open commission rate (by amount)
        ('OpenRatioByVolume', c_double),  # Open commission (per lot)
        ('CloseRatioByMoney', c_double),  # Close commission rate (by amount)
        ('CloseRatioByVolume', c_double),  # Close commission (per lot)
        ('CloseTodayRatioByMoney', c_double),  # Close-today commission rate (by amount)
        ('CloseTodayRatioByVolume', c_double),  # Close-today commission (per lot)
        ('StrikeRatioByMoney', c_double),  # Exercise commission rate (by amount)
        ('StrikeRatioByVolume', c_double),  # Exercise commission (per lot)
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class OptionInstrTradeCostField(BaseField):
    """Option trade cost"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('FixedMargin', c_double),  # Fixed part of the option margin
        ('MiniMargin', c_double),  # Minimum option margin
        ('Royalty', c_double),  # Option premium
        ('ExchFixedMargin', c_double),  # Exchange fixed part of the option margin
        ('ExchMiniMargin', c_double),  # Exchange minimum option margin
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class QryOptionInstrTradeCostField(BaseField):
    """Query option trade cost"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('InputPrice', c_double),  # Quoted option price
        ('UnderlyingPrice', c_double),  # Underlying price; 0 means use the previous settlement price
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]
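
# Sketch: querying option trade cost. Per the UnderlyingPrice comment above,
# a value of 0 makes the server fall back to the previous settlement price.
# Assumes BaseField behaves like ctypes.Structure; all IDs and the one-char
# hedge flag literal are illustrative assumptions, not confirmed constants.
def _example_qry_option_trade_cost():
    qry = QryOptionInstrTradeCostField()
    qry.BrokerID = b'9999'
    qry.InvestorID = b'100001'
    qry.InstrumentID = b'm2401-C-4000'  # hypothetical option contract
    qry.HedgeFlag = b'1'                # assumed: speculation
    qry.InputPrice = 55.5               # option price to cost out
    qry.UnderlyingPrice = 0.0           # 0 => use previous settlement price
    return qry
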
class QryOptionInstrCommRateField(BaseField):
    """Query option commission rate"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class IndexPriceField(BaseField):
    """Stock index spot price"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ClosePrice', c_double)  # Spot index close price
    ]

class InputExecOrderField(BaseField):
    """Input exec order (option exercise declaration)"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExecOrderRef', c_char * 13),  # Exec-order reference
        ('UserID', c_char * 16),  # User ID
        ('Volume', c_int),  # Volume
        ('RequestID', c_int),  # Request ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OffsetFlag', c_char * 1),  # Offset flag
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('ActionType', c_char * 1),  # Action type
        ('PosiDirection', c_char * 1),  # Position direction of the reserve-position request
        ('ReservePositionFlag', c_char * 1),  # Whether to keep the futures position after exercise (deprecated)
        ('CloseFlag', c_char * 1),  # Whether the position created by exercise is closed automatically
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('AccountID', c_char * 13),  # Account ID
        ('CurrencyID', c_char * 4),  # Currency ID
        ('ClientID', c_char * 11),  # Client trading code
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]
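
# Sketch: filling an exercise declaration. The one-char flag values come from
# the API's constant section, which is not shown here, so the literals below
# are assumptions that only illustrate the shape of the request.
def _example_input_exec_order():
    req = InputExecOrderField()
    req.BrokerID = b'9999'
    req.InvestorID = b'100001'
    req.InstrumentID = b'm2401-C-4000'  # hypothetical option contract
    req.ExecOrderRef = b'1'
    req.Volume = 2                      # lots to exercise
    req.OffsetFlag = b'1'               # assumed: close the option position
    req.HedgeFlag = b'1'                # assumed: speculation
    req.ActionType = b'1'               # assumed: execute (vs. abandon)
    req.ExchangeID = b'DCE'
    return req
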
class InputExecOrderActionField(BaseField):
    """Input exec-order action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('ExecOrderActionRef', c_int),  # Exec-order action reference
        ('ExecOrderRef', c_char * 13),  # Exec-order reference
        ('RequestID', c_int),  # Request ID
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ExecOrderSysID', c_char * 21),  # Exec-order system ID
        ('ActionFlag', c_char * 1),  # Action flag
        ('UserID', c_char * 16),  # User ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class ExecOrderField(BaseField):
    """Exec order"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExecOrderRef', c_char * 13),  # Exec-order reference
        ('UserID', c_char * 16),  # User ID
        ('Volume', c_int),  # Volume
        ('RequestID', c_int),  # Request ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OffsetFlag', c_char * 1),  # Offset flag
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('ActionType', c_char * 1),  # Action type
        ('PosiDirection', c_char * 1),  # Position direction of the reserve-position request
        ('ReservePositionFlag', c_char * 1),  # Whether to keep the futures position after exercise (deprecated)
        ('CloseFlag', c_char * 1),  # Whether the position created by exercise is closed automatically
        ('ExecOrderLocalID', c_char * 13),  # Local exec-order ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('OrderSubmitStatus', c_char * 1),  # Exec-order submit status
        ('NotifySequence', c_int),  # Order notify sequence
        ('TradingDay', c_char * 9),  # Trading day
        ('SettlementID', c_int),  # Settlement ID
        ('ExecOrderSysID', c_char * 21),  # Exec-order system ID
        ('InsertDate', c_char * 9),  # Insert date
        ('InsertTime', c_char * 9),  # Insert time
        ('CancelTime', c_char * 9),  # Cancel time
        ('ExecResult', c_char * 1),  # Exec result
        ('ClearingPartID', c_char * 11),  # Clearing participant ID
        ('SequenceNo', c_int),  # Sequence number
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('UserProductInfo', c_char * 11),  # User product info
        ('StatusMsg', c_char * 81),  # Status message
        ('ActiveUserID', c_char * 16),  # Active user ID
        ('BrokerExecOrderSeq', c_int),  # Broker exec-order sequence number
        ('BranchID', c_char * 9),  # Branch ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('AccountID', c_char * 13),  # Account ID
        ('CurrencyID', c_char * 4),  # Currency ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class ExecOrderActionField(BaseField):
    """Exec-order action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('ExecOrderActionRef', c_int),  # Exec-order action reference
        ('ExecOrderRef', c_char * 13),  # Exec-order reference
        ('RequestID', c_int),  # Request ID
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ExecOrderSysID', c_char * 21),  # Exec-order system ID
        ('ActionFlag', c_char * 1),  # Action flag
        ('ActionDate', c_char * 9),  # Action date
        ('ActionTime', c_char * 9),  # Action time
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('ExecOrderLocalID', c_char * 13),  # Local exec-order ID
        ('ActionLocalID', c_char * 13),  # Local action ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OrderActionStatus', c_char * 1),  # Order action status
        ('UserID', c_char * 16),  # User ID
        ('ActionType', c_char * 1),  # Action type
        ('StatusMsg', c_char * 81),  # Status message
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('BranchID', c_char * 9),  # Branch ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QryExecOrderField(BaseField):
    """Query exec order"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ExecOrderSysID', c_char * 21),  # Exec-order system ID
        ('InsertTimeStart', c_char * 9),  # Start time
        ('InsertTimeEnd', c_char * 9)  # End time
    ]

class ExchangeExecOrderField(BaseField):
    """Exchange exec-order info"""
    _fields_ = [
        ('Volume', c_int),  # Volume
        ('RequestID', c_int),  # Request ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OffsetFlag', c_char * 1),  # Offset flag
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('ActionType', c_char * 1),  # Action type
        ('PosiDirection', c_char * 1),  # Position direction of the reserve-position request
        ('ReservePositionFlag', c_char * 1),  # Whether to keep the futures position after exercise (deprecated)
        ('CloseFlag', c_char * 1),  # Whether the position created by exercise is closed automatically
        ('ExecOrderLocalID', c_char * 13),  # Local exec-order ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('OrderSubmitStatus', c_char * 1),  # Exec-order submit status
        ('NotifySequence', c_int),  # Order notify sequence
        ('TradingDay', c_char * 9),  # Trading day
        ('SettlementID', c_int),  # Settlement ID
        ('ExecOrderSysID', c_char * 21),  # Exec-order system ID
        ('InsertDate', c_char * 9),  # Insert date
        ('InsertTime', c_char * 9),  # Insert time
        ('CancelTime', c_char * 9),  # Cancel time
        ('ExecResult', c_char * 1),  # Exec result
        ('ClearingPartID', c_char * 11),  # Clearing participant ID
        ('SequenceNo', c_int),  # Sequence number
        ('BranchID', c_char * 9),  # Branch ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QryExchangeExecOrderField(BaseField):
    """Query exchange exec order"""
    _fields_ = [
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('TraderID', c_char * 21)  # Trader ID at the exchange
    ]

class QryExecOrderActionField(BaseField):
    """Query exec-order action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class ExchangeExecOrderActionField(BaseField):
    """Exchange exec-order action"""
    _fields_ = [
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ExecOrderSysID', c_char * 21),  # Exec-order system ID
        ('ActionFlag', c_char * 1),  # Action flag
        ('ActionDate', c_char * 9),  # Action date
        ('ActionTime', c_char * 9),  # Action time
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('ExecOrderLocalID', c_char * 13),  # Local exec-order ID
        ('ActionLocalID', c_char * 13),  # Local action ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OrderActionStatus', c_char * 1),  # Order action status
        ('UserID', c_char * 16),  # User ID
        ('ActionType', c_char * 1),  # Action type
        ('BranchID', c_char * 9),  # Branch ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21),  # MAC address
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('Volume', c_int)  # Volume
    ]

class QryExchangeExecOrderActionField(BaseField):
    """Query exchange exec-order action"""
    _fields_ = [
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('TraderID', c_char * 21)  # Trader ID at the exchange
    ]

class ErrExecOrderField(BaseField):
    """Erroneous exec order"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExecOrderRef', c_char * 13),  # Exec-order reference
        ('UserID', c_char * 16),  # User ID
        ('Volume', c_int),  # Volume
        ('RequestID', c_int),  # Request ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OffsetFlag', c_char * 1),  # Offset flag
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('ActionType', c_char * 1),  # Action type
        ('PosiDirection', c_char * 1),  # Position direction of the reserve-position request
        ('ReservePositionFlag', c_char * 1),  # Whether to keep the futures position after exercise (deprecated)
        ('CloseFlag', c_char * 1),  # Whether the position created by exercise is closed automatically
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('AccountID', c_char * 13),  # Account ID
        ('CurrencyID', c_char * 4),  # Currency ID
        ('ClientID', c_char * 11),  # Client trading code
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21),  # MAC address
        ('ErrorID', c_int),  # Error ID
        ('ErrorMsg', c_char * 81)  # Error message
    ]

class QryErrExecOrderField(BaseField):
    """Query erroneous exec orders"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13)  # Investor ID
    ]

class ErrExecOrderActionField(BaseField):
    """Erroneous exec-order action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('ExecOrderActionRef', c_int),  # Exec-order action reference
        ('ExecOrderRef', c_char * 13),  # Exec-order reference
        ('RequestID', c_int),  # Request ID
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ExecOrderSysID', c_char * 21),  # Exec-order system ID
        ('ActionFlag', c_char * 1),  # Action flag
        ('UserID', c_char * 16),  # User ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21),  # MAC address
        ('ErrorID', c_int),  # Error ID
        ('ErrorMsg', c_char * 81)  # Error message
    ]

class QryErrExecOrderActionField(BaseField):
    """Query erroneous exec-order actions"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13)  # Investor ID
    ]

class OptionInstrTradingRightField(BaseField):
    """Investor option trading right"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('Direction', c_char * 1),  # Direction
        ('TradingRight', c_char * 1)  # Trading right
    ]

class QryOptionInstrTradingRightField(BaseField):
    """Query option trading right"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('Direction', c_char * 1)  # Direction
    ]

class InputForQuoteField(BaseField):
    """Input RFQ (request for quote)"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ForQuoteRef', c_char * 13),  # RFQ reference
        ('UserID', c_char * 16),  # User ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class ForQuoteField(BaseField):
    """RFQ (request for quote)"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ForQuoteRef', c_char * 13),  # RFQ reference
        ('UserID', c_char * 16),  # User ID
        ('ForQuoteLocalID', c_char * 13),  # Local RFQ ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('InsertDate', c_char * 9),  # Insert date
        ('InsertTime', c_char * 9),  # Insert time
        ('ForQuoteStatus', c_char * 1),  # RFQ status
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('StatusMsg', c_char * 81),  # Status message
        ('ActiveUserID', c_char * 16),  # Active user ID
        ('BrokerForQutoSeq', c_int),  # Broker RFQ sequence number
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QryForQuoteField(BaseField):
    """Query RFQ"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InsertTimeStart', c_char * 9),  # Start time
        ('InsertTimeEnd', c_char * 9),  # End time
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class ExchangeForQuoteField(BaseField):
    """Exchange RFQ info"""
    _fields_ = [
        ('ForQuoteLocalID', c_char * 13),  # Local RFQ ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('InsertDate', c_char * 9),  # Insert date
        ('InsertTime', c_char * 9),  # Insert time
        ('ForQuoteStatus', c_char * 1),  # RFQ status
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QryExchangeForQuoteField(BaseField):
    """Query exchange RFQ"""
    _fields_ = [
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('TraderID', c_char * 21)  # Trader ID at the exchange
    ]

class InputQuoteField(BaseField):
    """Input quote"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('QuoteRef', c_char * 13),  # Quote reference
        ('UserID', c_char * 16),  # User ID
        ('AskPrice', c_double),  # Ask price
        ('BidPrice', c_double),  # Bid price
        ('AskVolume', c_int),  # Ask volume
        ('BidVolume', c_int),  # Bid volume
        ('RequestID', c_int),  # Request ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('AskOffsetFlag', c_char * 1),  # Ask offset flag
        ('BidOffsetFlag', c_char * 1),  # Bid offset flag
        ('AskHedgeFlag', c_char * 1),  # Ask hedge flag
        ('BidHedgeFlag', c_char * 1),  # Bid hedge flag
        ('AskOrderRef', c_char * 13),  # Derived ask order reference
        ('BidOrderRef', c_char * 13),  # Derived bid order reference
        ('ForQuoteSysID', c_char * 21),  # RFQ system ID being answered
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('ClientID', c_char * 11),  # Client trading code
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]
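
# Sketch: a market maker answering an RFQ with a two-sided quote. The
# ForQuoteSysID ties the quote back to the RFQ; the instrument code and the
# one-char flag literals are assumptions for illustration only.
def _example_input_quote(for_quote_sys_id):
    quote = InputQuoteField()
    quote.BrokerID = b'9999'
    quote.InvestorID = b'100001'
    quote.InstrumentID = b'm2401-C-4000'
    quote.QuoteRef = b'1'
    quote.BidPrice, quote.AskPrice = 54.0, 56.0
    quote.BidVolume, quote.AskVolume = 10, 10
    quote.BidOffsetFlag = quote.AskOffsetFlag = b'0'  # assumed: open
    quote.BidHedgeFlag = quote.AskHedgeFlag = b'1'    # assumed: speculation
    quote.ForQuoteSysID = for_quote_sys_id            # RFQ being answered
    quote.ExchangeID = b'DCE'
    return quote
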
class InputQuoteActionField(BaseField):
    """Input quote action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('QuoteActionRef', c_int),  # Quote action reference
        ('QuoteRef', c_char * 13),  # Quote reference
        ('RequestID', c_int),  # Request ID
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('QuoteSysID', c_char * 21),  # Quote system ID
        ('ActionFlag', c_char * 1),  # Action flag
        ('UserID', c_char * 16),  # User ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('ClientID', c_char * 11),  # Client trading code
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QuoteField(BaseField):
    """Quote"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('QuoteRef', c_char * 13),  # Quote reference
        ('UserID', c_char * 16),  # User ID
        ('AskPrice', c_double),  # Ask price
        ('BidPrice', c_double),  # Bid price
        ('AskVolume', c_int),  # Ask volume
        ('BidVolume', c_int),  # Bid volume
        ('RequestID', c_int),  # Request ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('AskOffsetFlag', c_char * 1),  # Ask offset flag
        ('BidOffsetFlag', c_char * 1),  # Bid offset flag
        ('AskHedgeFlag', c_char * 1),  # Ask hedge flag
        ('BidHedgeFlag', c_char * 1),  # Bid hedge flag
        ('QuoteLocalID', c_char * 13),  # Local quote ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('NotifySequence', c_int),  # Quote notify sequence
        ('OrderSubmitStatus', c_char * 1),  # Quote submit status
        ('TradingDay', c_char * 9),  # Trading day
        ('SettlementID', c_int),  # Settlement ID
        ('QuoteSysID', c_char * 21),  # Quote system ID
        ('InsertDate', c_char * 9),  # Insert date
        ('InsertTime', c_char * 9),  # Insert time
        ('CancelTime', c_char * 9),  # Cancel time
        ('QuoteStatus', c_char * 1),  # Quote status
        ('ClearingPartID', c_char * 11),  # Clearing participant ID
        ('SequenceNo', c_int),  # Sequence number
        ('AskOrderSysID', c_char * 21),  # Ask order system ID
        ('BidOrderSysID', c_char * 21),  # Bid order system ID
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('UserProductInfo', c_char * 11),  # User product info
        ('StatusMsg', c_char * 81),  # Status message
        ('ActiveUserID', c_char * 16),  # Active user ID
        ('BrokerQuoteSeq', c_int),  # Broker quote sequence number
        ('AskOrderRef', c_char * 13),  # Derived ask order reference
        ('BidOrderRef', c_char * 13),  # Derived bid order reference
        ('ForQuoteSysID', c_char * 21),  # RFQ system ID being answered
        ('BranchID', c_char * 9),  # Branch ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('AccountID', c_char * 13),  # Account ID
        ('CurrencyID', c_char * 4),  # Currency ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QuoteActionField(BaseField):
    """Quote action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('QuoteActionRef', c_int),  # Quote action reference
        ('QuoteRef', c_char * 13),  # Quote reference
        ('RequestID', c_int),  # Request ID
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('QuoteSysID', c_char * 21),  # Quote system ID
        ('ActionFlag', c_char * 1),  # Action flag
        ('ActionDate', c_char * 9),  # Action date
        ('ActionTime', c_char * 9),  # Action time
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('QuoteLocalID', c_char * 13),  # Local quote ID
        ('ActionLocalID', c_char * 13),  # Local action ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OrderActionStatus', c_char * 1),  # Order action status
        ('UserID', c_char * 16),  # User ID
        ('StatusMsg', c_char * 81),  # Status message
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('BranchID', c_char * 9),  # Branch ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QryQuoteField(BaseField):
    """Query quote"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('QuoteSysID', c_char * 21),  # Quote system ID
        ('InsertTimeStart', c_char * 9),  # Start time
        ('InsertTimeEnd', c_char * 9),  # End time
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class ExchangeQuoteField(BaseField):
    """Exchange quote info"""
    _fields_ = [
        ('AskPrice', c_double),  # Ask price
        ('BidPrice', c_double),  # Bid price
        ('AskVolume', c_int),  # Ask volume
        ('BidVolume', c_int),  # Bid volume
        ('RequestID', c_int),  # Request ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('AskOffsetFlag', c_char * 1),  # Ask offset flag
        ('BidOffsetFlag', c_char * 1),  # Bid offset flag
        ('AskHedgeFlag', c_char * 1),  # Ask hedge flag
        ('BidHedgeFlag', c_char * 1),  # Bid hedge flag
        ('QuoteLocalID', c_char * 13),  # Local quote ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('NotifySequence', c_int),  # Quote notify sequence
        ('OrderSubmitStatus', c_char * 1),  # Quote submit status
        ('TradingDay', c_char * 9),  # Trading day
        ('SettlementID', c_int),  # Settlement ID
        ('QuoteSysID', c_char * 21),  # Quote system ID
        ('InsertDate', c_char * 9),  # Insert date
        ('InsertTime', c_char * 9),  # Insert time
        ('CancelTime', c_char * 9),  # Cancel time
        ('QuoteStatus', c_char * 1),  # Quote status
        ('ClearingPartID', c_char * 11),  # Clearing participant ID
        ('SequenceNo', c_int),  # Sequence number
        ('AskOrderSysID', c_char * 21),  # Ask order system ID
        ('BidOrderSysID', c_char * 21),  # Bid order system ID
        ('ForQuoteSysID', c_char * 21),  # RFQ system ID being answered
        ('BranchID', c_char * 9),  # Branch ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QryExchangeQuoteField(BaseField):
    """Query exchange quote"""
    _fields_ = [
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('TraderID', c_char * 21)  # Trader ID at the exchange
    ]

class QryQuoteActionField(BaseField):
    """Query quote action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class ExchangeQuoteActionField(BaseField):
    """Exchange quote action"""
    _fields_ = [
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('QuoteSysID', c_char * 21),  # Quote system ID
        ('ActionFlag', c_char * 1),  # Action flag
        ('ActionDate', c_char * 9),  # Action date
        ('ActionTime', c_char * 9),  # Action time
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('QuoteLocalID', c_char * 13),  # Local quote ID
        ('ActionLocalID', c_char * 13),  # Local action ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OrderActionStatus', c_char * 1),  # Order action status
        ('UserID', c_char * 16),  # User ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QryExchangeQuoteActionField(BaseField):
    """Query exchange quote action"""
    _fields_ = [
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('TraderID', c_char * 21)  # Trader ID at the exchange
    ]

class OptionInstrDeltaField(BaseField):
    """Option instrument delta"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('Delta', c_double)  # Delta value
    ]

class ForQuoteRspField(BaseField):
    """RFQ notice pushed to market makers"""
    _fields_ = [
        ('TradingDay', c_char * 9),  # Trading day
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ForQuoteSysID', c_char * 21),  # RFQ system ID
        ('ForQuoteTime', c_char * 9),  # RFQ time
        ('ActionDay', c_char * 9),  # Business date
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]
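
# Sketch: a minimal handler for the RFQ notice, assuming the feed delivers
# GB18030/GBK-encoded bytes (an assumption about the wire encoding). It hands
# off to the quoting sketch above, which hardcodes its own instrument.
def _example_on_rtn_for_quote(rsp):
    """Log the RFQ and build a responding two-sided quote."""
    instrument = rsp.InstrumentID.decode('gb18030', errors='replace')
    rfq_id = rsp.ForQuoteSysID
    print(f'RFQ {rfq_id!r} on {instrument} at {rsp.ForQuoteTime.decode()}')
    return _example_input_quote(rfq_id)
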
class StrikeOffsetField(BaseField):
    """Current strike offset details of an option instrument"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('Offset', c_double),  # Strike offset
        ('OffsetType', c_char * 1)  # Strike offset type
    ]

class QryStrikeOffsetField(BaseField):
    """Query strike offset"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31)  # Instrument ID
    ]

class InputBatchOrderActionField(BaseField):
    """Input batch order action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('OrderActionRef', c_int),  # Order action reference
        ('RequestID', c_int),  # Request ID
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('UserID', c_char * 16),  # User ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class BatchOrderActionField(BaseField):
    """Batch order action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('OrderActionRef', c_int),  # Order action reference
        ('RequestID', c_int),  # Request ID
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ActionDate', c_char * 9),  # Action date
        ('ActionTime', c_char * 9),  # Action time
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('ActionLocalID', c_char * 13),  # Local action ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OrderActionStatus', c_char * 1),  # Order action status
        ('UserID', c_char * 16),  # User ID
        ('StatusMsg', c_char * 81),  # Status message
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class ExchangeBatchOrderActionField(BaseField):
    """Exchange batch order action"""
    _fields_ = [
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ActionDate', c_char * 9),  # Action date
        ('ActionTime', c_char * 9),  # Action time
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('ActionLocalID', c_char * 13),  # Local action ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OrderActionStatus', c_char * 1),  # Order action status
        ('UserID', c_char * 16),  # User ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QryBatchOrderActionField(BaseField):
    """Query batch order action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class CombInstrumentGuardField(BaseField):
    """Combined instrument guard ratio"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('GuarantRatio', c_double),  # Guarantee ratio
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class QryCombInstrumentGuardField(BaseField):
    """Query combined instrument guard ratio"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class InputCombActionField(BaseField):
    """Input combination action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('CombActionRef', c_char * 13),  # Combination action reference
        ('UserID', c_char * 16),  # User ID
        ('Direction', c_char * 1),  # Direction
        ('Volume', c_int),  # Volume
        ('CombDirection', c_char * 1),  # Combination instruction direction
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21),  # MAC address
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class CombActionField(BaseField):
    """Combination action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('CombActionRef', c_char * 13),  # Combination action reference
        ('UserID', c_char * 16),  # User ID
        ('Direction', c_char * 1),  # Direction
        ('Volume', c_int),  # Volume
        ('CombDirection', c_char * 1),  # Combination instruction direction
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('ActionLocalID', c_char * 13),  # Local combination action ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('ActionStatus', c_char * 1),  # Combination status
        ('NotifySequence', c_int),  # Order notify sequence
        ('TradingDay', c_char * 9),  # Trading day
        ('SettlementID', c_int),  # Settlement ID
        ('SequenceNo', c_int),  # Sequence number
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('UserProductInfo', c_char * 11),  # User product info
        ('StatusMsg', c_char * 81),  # Status message
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21),  # MAC address
        ('ComTradeID', c_char * 21),  # Combination trade ID
        ('BranchID', c_char * 9),  # Branch ID
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class QryCombActionField(BaseField):
    """Query combination action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class ExchangeCombActionField(BaseField):
    """Exchange combination action info"""
    _fields_ = [
        ('Direction', c_char * 1),  # Direction
        ('Volume', c_int),  # Volume
        ('CombDirection', c_char * 1),  # Combination instruction direction
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('ActionLocalID', c_char * 13),  # Local combination action ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('ActionStatus', c_char * 1),  # Combination status
        ('NotifySequence', c_int),  # Order notify sequence
        ('TradingDay', c_char * 9),  # Trading day
        ('SettlementID', c_int),  # Settlement ID
        ('SequenceNo', c_int),  # Sequence number
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21),  # MAC address
        ('ComTradeID', c_char * 21),  # Combination trade ID
        ('BranchID', c_char * 9)  # Branch ID
    ]

class QryExchangeCombActionField(BaseField):
    """Query exchange combination action"""
    _fields_ = [
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('TraderID', c_char * 21)  # Trader ID at the exchange
    ]

class ProductExchRateField(BaseField):
    """Product quote exchange rate"""
    _fields_ = [
        ('ProductID', c_char * 31),  # Product ID
        ('QuoteCurrencyID', c_char * 4),  # Quote currency ID
        ('ExchangeRate', c_double),  # Exchange rate
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class QryProductExchRateField(BaseField):
    """Query product quote exchange rate"""
    _fields_ = [
        ('ProductID', c_char * 31),  # Product ID
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class QryForQuoteParamField(BaseField):
    """Query RFQ spread parameters"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class ForQuoteParamField(BaseField):
    """RFQ spread parameters"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('LastPrice', c_double),  # Last price
        ('PriceInterval', c_double)  # Price interval (spread)
    ]
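
# Sketch: screen a two-sided quote against the RFQ spread parameters. The
# interpretation below is an assumption: the bid/ask should straddle
# LastPrice and the spread should not exceed PriceInterval.
def _example_quote_within_spread(param, bid, ask):
    return (bid <= param.LastPrice <= ask
            and (ask - bid) <= param.PriceInterval)
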
class MMOptionInstrCommRateField(BaseField):
    """Current market-maker option commission details"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('OpenRatioByMoney', c_double),  # Open commission rate (by amount)
        ('OpenRatioByVolume', c_double),  # Open commission (per lot)
        ('CloseRatioByMoney', c_double),  # Close commission rate (by amount)
        ('CloseRatioByVolume', c_double),  # Close commission (per lot)
        ('CloseTodayRatioByMoney', c_double),  # Close-today commission rate (by amount)
        ('CloseTodayRatioByVolume', c_double),  # Close-today commission (per lot)
        ('StrikeRatioByMoney', c_double),  # Exercise commission rate (by amount)
        ('StrikeRatioByVolume', c_double)  # Exercise commission (per lot)
    ]

class QryMMOptionInstrCommRateField(BaseField):
    """Query market-maker option commission rate"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31)  # Instrument ID
    ]

class MMInstrumentCommissionRateField(BaseField):
    """Market-maker instrument commission rate"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('OpenRatioByMoney', c_double),  # Open commission rate (by amount)
        ('OpenRatioByVolume', c_double),  # Open commission (per lot)
        ('CloseRatioByMoney', c_double),  # Close commission rate (by amount)
        ('CloseRatioByVolume', c_double),  # Close commission (per lot)
        ('CloseTodayRatioByMoney', c_double),  # Close-today commission rate (by amount)
        ('CloseTodayRatioByVolume', c_double)  # Close-today commission (per lot)
    ]

class QryMMInstrumentCommissionRateField(BaseField):
    """Query market-maker instrument commission rate"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31)  # Instrument ID
    ]

class InstrumentOrderCommRateField(BaseField):
    """Current order commission details"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('OrderCommByVolume', c_double),  # Order commission (per order)
        ('OrderActionCommByVolume', c_double),  # Order-cancel commission (per order)
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class QryInstrumentOrderCommRateField(BaseField):
    """Query order commission rate"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31)  # Instrument ID
    ]

class TradeParamField(BaseField):
    """Trade parameters"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('TradeParamID', c_char * 1),  # Parameter ID
        ('TradeParamValue', c_char * 256),  # Parameter value
        ('Memo', c_char * 161)  # Memo
    ]

class InstrumentMarginRateULField(BaseField):
    """Instrument margin rate adjustment"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('LongMarginRatioByMoney', c_double),  # Long margin ratio (by amount)
        ('LongMarginRatioByVolume', c_double),  # Long margin fee (per lot)
        ('ShortMarginRatioByMoney', c_double),  # Short margin ratio (by amount)
        ('ShortMarginRatioByVolume', c_double)  # Short margin fee (per lot)
    ]

class FutureLimitPosiParamField(BaseField):
    """Futures position limit parameters"""
    _fields_ = [
        ('InvestorRange', c_char * 1),  # Investor range
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('ProductID', c_char * 31),  # Product ID
        ('SpecOpenVolume', c_int),  # Daily speculative open-volume limit
        ('ArbiOpenVolume', c_int),  # Daily arbitrage open-volume limit
        ('OpenVolume', c_int)  # Daily speculative-plus-arbitrage open-volume limit
    ]
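
# Sketch: pre-check an intended speculative open against the daily limits in
# FutureLimitPosiParamField. The struct only carries the caps, so the volume
# already opened today comes from the caller's own bookkeeping (hypothetical
# parameters).
def _example_can_open_spec(param, spec_opened_today, total_opened_today, intend):
    return (spec_opened_today + intend <= param.SpecOpenVolume
            and total_opened_today + intend <= param.OpenVolume)
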
class LoginForbiddenIPField(BaseField):
    """Login-forbidden IP"""
    _fields_ = [
        ('IPAddress', c_char * 16)  # IP address
    ]

class IPListField(BaseField):
    """IP list"""
    _fields_ = [
        ('IPAddress', c_char * 16),  # IP address
        ('IsWhite', c_int)  # Whether it is a whitelist entry
    ]

class InputOptionSelfCloseField(BaseField):
    """Input option self-close"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('OptionSelfCloseRef', c_char * 13),  # Option self-close reference
        ('UserID', c_char * 16),  # User ID
        ('Volume', c_int),  # Volume
        ('RequestID', c_int),  # Request ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('OptSelfCloseFlag', c_char * 1),  # Whether the exercised position self-closes
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('AccountID', c_char * 13),  # Account ID
        ('CurrencyID', c_char * 4),  # Currency ID
        ('ClientID', c_char * 11),  # Client trading code
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class InputOptionSelfCloseActionField(BaseField):
    """Input option self-close action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('OptionSelfCloseActionRef', c_int),  # Option self-close action reference
        ('OptionSelfCloseRef', c_char * 13),  # Option self-close reference
        ('RequestID', c_int),  # Request ID
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('OptionSelfCloseSysID', c_char * 21),  # Option self-close system ID
        ('ActionFlag', c_char * 1),  # Action flag
        ('UserID', c_char * 16),  # User ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class OptionSelfCloseField(BaseField):
    """Option self-close"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('OptionSelfCloseRef', c_char * 13),  # Option self-close reference
        ('UserID', c_char * 16),  # User ID
        ('Volume', c_int),  # Volume
        ('RequestID', c_int),  # Request ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('OptSelfCloseFlag', c_char * 1),  # Whether the exercised position self-closes
        ('OptionSelfCloseLocalID', c_char * 13),  # Local option self-close ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('OrderSubmitStatus', c_char * 1),  # Option self-close submit status
        ('NotifySequence', c_int),  # Order notify sequence
        ('TradingDay', c_char * 9),  # Trading day
        ('SettlementID', c_int),  # Settlement ID
        ('OptionSelfCloseSysID', c_char * 21),  # Option self-close system ID
        ('InsertDate', c_char * 9),  # Insert date
        ('InsertTime', c_char * 9),  # Insert time
        ('CancelTime', c_char * 9),  # Cancel time
        ('ExecResult', c_char * 1),  # Self-close result
        ('ClearingPartID', c_char * 11),  # Clearing participant ID
        ('SequenceNo', c_int),  # Sequence number
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('UserProductInfo', c_char * 11),  # User product info
        ('StatusMsg', c_char * 81),  # Status message
        ('ActiveUserID', c_char * 16),  # Active user ID
        ('BrokerOptionSelfCloseSeq', c_int),  # Broker order sequence number
        ('BranchID', c_char * 9),  # Branch ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('AccountID', c_char * 13),  # Account ID
        ('CurrencyID', c_char * 4),  # Currency ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class OptionSelfCloseActionField(BaseField):
    """Option self-close action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('OptionSelfCloseActionRef', c_int),  # Option self-close action reference
        ('OptionSelfCloseRef', c_char * 13),  # Option self-close reference
        ('RequestID', c_int),  # Request ID
        ('FrontID', c_int),  # Front ID
        ('SessionID', c_int),  # Session ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('OptionSelfCloseSysID', c_char * 21),  # Option self-close system ID
        ('ActionFlag', c_char * 1),  # Action flag
        ('ActionDate', c_char * 9),  # Action date
        ('ActionTime', c_char * 9),  # Action time
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('OptionSelfCloseLocalID', c_char * 13),  # Local option self-close ID
        ('ActionLocalID', c_char * 13),  # Local action ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OrderActionStatus', c_char * 1),  # Order action status
        ('UserID', c_char * 16),  # User ID
        ('StatusMsg', c_char * 81),  # Status message
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('BranchID', c_char * 9),  # Branch ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QryOptionSelfCloseField(BaseField):
    """Query option self-close"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('OptionSelfCloseSysID', c_char * 21),  # Option self-close system ID
        ('InsertTimeStart', c_char * 9),  # Start time
        ('InsertTimeEnd', c_char * 9)  # End time
    ]

class ExchangeOptionSelfCloseField(BaseField):
    """Exchange option self-close info"""
    _fields_ = [
        ('Volume', c_int),  # Volume
        ('RequestID', c_int),  # Request ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('OptSelfCloseFlag', c_char * 1),  # Whether the exercised position self-closes
        ('OptionSelfCloseLocalID', c_char * 13),  # Local option self-close ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('OrderSubmitStatus', c_char * 1),  # Option self-close submit status
        ('NotifySequence', c_int),  # Order notify sequence
        ('TradingDay', c_char * 9),  # Trading day
        ('SettlementID', c_int),  # Settlement ID
        ('OptionSelfCloseSysID', c_char * 21),  # Option self-close system ID
        ('InsertDate', c_char * 9),  # Insert date
        ('InsertTime', c_char * 9),  # Insert time
        ('CancelTime', c_char * 9),  # Cancel time
        ('ExecResult', c_char * 1),  # Self-close result
        ('ClearingPartID', c_char * 11),  # Clearing participant ID
        ('SequenceNo', c_int),  # Sequence number
        ('BranchID', c_char * 9),  # Branch ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]

class QryOptionSelfCloseActionField(BaseField):
    """Query option self-close action"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class ExchangeOptionSelfCloseActionField(BaseField):
    """Exchange option self-close action"""
    _fields_ = [
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('OptionSelfCloseSysID', c_char * 21),  # Option self-close system ID
        ('ActionFlag', c_char * 1),  # Action flag
        ('ActionDate', c_char * 9),  # Action date
        ('ActionTime', c_char * 9),  # Action time
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('InstallID', c_int),  # Install ID
        ('OptionSelfCloseLocalID', c_char * 13),  # Local option self-close ID
        ('ActionLocalID', c_char * 13),  # Local action ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('ClientID', c_char * 11),  # Client ID
        ('BusinessUnit', c_char * 21),  # Business unit
        ('OrderActionStatus', c_char * 1),  # Order action status
        ('UserID', c_char * 16),  # User ID
        ('BranchID', c_char * 9),  # Branch ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21),  # MAC address
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('OptSelfCloseFlag', c_char * 1)  # Whether the exercised position self-closes
    ]

class SyncDelaySwapField(BaseField):
    """Delayed currency-swap sync"""
    _fields_ = [
        ('DelaySwapSeqNo', c_char * 15),  # Swap serial number
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('FromCurrencyID', c_char * 4),  # Source currency ID
        ('FromAmount', c_double),  # Source amount
        ('FromFrozenSwap', c_double),  # Source swap amount frozen from available funds
        ('FromRemainSwap', c_double),  # Source swap quota remaining (frozen from withdrawable funds)
        ('ToCurrencyID', c_char * 4),  # Target currency ID
        ('ToAmount', c_double)  # Target amount
    ]

class QrySyncDelaySwapField(BaseField):
    """Query delayed currency-swap sync"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('DelaySwapSeqNo', c_char * 15)  # Delayed-swap serial number
    ]

class InvestUnitField(BaseField):
    """Investment unit"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('InvestorUnitName', c_char * 81),  # Investment unit name
        ('InvestorGroupID', c_char * 13),  # Investor group ID
        ('CommModelID', c_char * 13),  # Commission model ID
        ('MarginModelID', c_char * 13),  # Margin model ID
        ('AccountID', c_char * 13),  # Account ID
        ('CurrencyID', c_char * 4)  # Currency ID
    ]

class QryInvestUnitField(BaseField):
    """Query investment unit"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class SecAgentCheckModeField(BaseField):
    """Secondary agent fund check mode"""
    _fields_ = [
        ('InvestorID', c_char * 13),  # Investor ID
        ('BrokerID', c_char * 11),  # Broker ID
        ('CurrencyID', c_char * 4),  # Currency ID
        ('BrokerSecAgentID', c_char * 13),  # Account ID at the overseas intermediary
        ('CheckSelfAccount', c_int)  # Whether the investor's own account must be verified
    ]

class SecAgentTradeInfoField(BaseField):
    """Secondary agent info"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('BrokerSecAgentID', c_char * 13),  # Account ID at the overseas intermediary
        ('InvestorID', c_char * 13),  # Investor ID
        ('LongCustomerName', c_char * 161)  # Secondary agent name
    ]

class MarketDataField(BaseField):
    """Market data"""
    _fields_ = [
        ('TradingDay', c_char * 9),  # Trading day
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('LastPrice', c_double),  # Last price
        ('PreSettlementPrice', c_double),  # Previous settlement price
        ('PreClosePrice', c_double),  # Previous close price
        ('PreOpenInterest', c_double),  # Previous open interest
        ('OpenPrice', c_double),  # Open price
        ('HighestPrice', c_double),  # Highest price
        ('LowestPrice', c_double),  # Lowest price
        ('Volume', c_int),  # Volume
        ('Turnover', c_double),  # Turnover
        ('OpenInterest', c_double),  # Open interest
        ('ClosePrice', c_double),  # Close price
        ('SettlementPrice', c_double),  # Settlement price
        ('UpperLimitPrice', c_double),  # Upper limit price
        ('LowerLimitPrice', c_double),  # Lower limit price
        ('PreDelta', c_double),  # Previous delta
        ('CurrDelta', c_double),  # Current delta
        ('UpdateTime', c_char * 9),  # Last update time
        ('UpdateMillisec', c_int),  # Last update millisecond
        ('ActionDay', c_char * 9)  # Business date
    ]
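
# Sketch: flatten any received Field struct into a plain dict for logging.
# Iterating _fields_ works for all of these classes (assuming BaseField is a
# ctypes.Structure); decoding bytes as GB18030 is an assumption about the
# feed's encoding.
def _example_to_dict(field):
    out = {}
    for name, _ctype in field._fields_:
        value = getattr(field, name)
        if isinstance(value, bytes):
            value = value.decode('gb18030', errors='replace')
        out[name] = value
    return out

# e.g. _example_to_dict(MarketDataField()) -> {'TradingDay': '', 'LastPrice': 0.0, ...}
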
class MarketDataBaseField(BaseField):
    """Market data: base attributes"""
    _fields_ = [
        ('TradingDay', c_char * 9),  # Trading day
        ('PreSettlementPrice', c_double),  # Previous settlement price
        ('PreClosePrice', c_double),  # Previous close price
        ('PreOpenInterest', c_double),  # Previous open interest
        ('PreDelta', c_double)  # Previous delta
    ]

class MarketDataStaticField(BaseField):
    """Market data: static attributes"""
    _fields_ = [
        ('OpenPrice', c_double),  # Open price
        ('HighestPrice', c_double),  # Highest price
        ('LowestPrice', c_double),  # Lowest price
        ('ClosePrice', c_double),  # Close price
        ('UpperLimitPrice', c_double),  # Upper limit price
        ('LowerLimitPrice', c_double),  # Lower limit price
        ('SettlementPrice', c_double),  # Settlement price
        ('CurrDelta', c_double)  # Current delta
    ]

class MarketDataLastMatchField(BaseField):
    """Market data: last-match attributes"""
    _fields_ = [
        ('LastPrice', c_double),  # Last price
        ('Volume', c_int),  # Volume
        ('Turnover', c_double),  # Turnover
        ('OpenInterest', c_double)  # Open interest
    ]

class MarketDataBestPriceField(BaseField):
    """Market data: best-price attributes"""
    _fields_ = [
        ('BidPrice1', c_double),  # Bid price 1
        ('BidVolume1', c_int),  # Bid volume 1
        ('AskPrice1', c_double),  # Ask price 1
        ('AskVolume1', c_int)  # Ask volume 1
    ]

class MarketDataBid23Field(BaseField):
    """Market data: bid levels 2 and 3"""
    _fields_ = [
        ('BidPrice2', c_double),  # Bid price 2
        ('BidVolume2', c_int),  # Bid volume 2
        ('BidPrice3', c_double),  # Bid price 3
        ('BidVolume3', c_int)  # Bid volume 3
    ]

class MarketDataAsk23Field(BaseField):
    """Market data: ask levels 2 and 3"""
    _fields_ = [
        ('AskPrice2', c_double),  # Ask price 2
        ('AskVolume2', c_int),  # Ask volume 2
        ('AskPrice3', c_double),  # Ask price 3
        ('AskVolume3', c_int)  # Ask volume 3
    ]

class MarketDataBid45Field(BaseField):
    """Market data: bid levels 4 and 5"""
    _fields_ = [
        ('BidPrice4', c_double),  # Bid price 4
        ('BidVolume4', c_int),  # Bid volume 4
        ('BidPrice5', c_double),  # Bid price 5
        ('BidVolume5', c_int)  # Bid volume 5
    ]

class MarketDataAsk45Field(BaseField):
    """Market data: ask levels 4 and 5"""
    _fields_ = [
        ('AskPrice4', c_double),  # Ask price 4
        ('AskVolume4', c_int),  # Ask volume 4
        ('AskPrice5', c_double),  # Ask price 5
        ('AskVolume5', c_int)  # Ask volume 5
    ]

class MarketDataUpdateTimeField(BaseField):
    """Market data: update-time attributes"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('UpdateTime', c_char * 9),  # Last update time
        ('UpdateMillisec', c_int),  # Last update millisecond
        ('ActionDay', c_char * 9)  # Business date
    ]

class MarketDataExchangeField(BaseField):
    """Market data: exchange ID attribute"""
    _fields_ = [
        ('ExchangeID', c_char * 9)  # Exchange ID
    ]

class SpecificInstrumentField(BaseField):
    """Specified instrument"""
    _fields_ = [
        ('InstrumentID', c_char * 31)  # Instrument ID
    ]

class InstrumentStatusField(BaseField):
    """Instrument status"""
    _fields_ = [
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ExchangeInstID', c_char * 31),  # Instrument ID on the exchange
        ('SettlementGroupID', c_char * 9),  # Settlement group ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('InstrumentStatus', c_char * 1),  # Instrument trading status
        ('TradingSegmentSN', c_int),  # Trading segment serial number
        ('EnterTime', c_char * 9),  # Time this status was entered
        ('EnterReason', c_char * 1)  # Reason this status was entered
    ]

class QryInstrumentStatusField(BaseField):
    """Query instrument status"""
    _fields_ = [
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ExchangeInstID', c_char * 31)  # Instrument ID on the exchange
    ]

class InvestorAccountField(BaseField):
    """Investor account"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('AccountID', c_char * 13),  # Investor account ID
        ('CurrencyID', c_char * 4)  # Currency ID
    ]

class PositionProfitAlgorithmField(BaseField):
    """Position profit algorithm"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('AccountID', c_char * 13),  # Investor account ID
        ('Algorithm', c_char * 1),  # P&L algorithm
        ('Memo', c_char * 161),  # Memo
        ('CurrencyID', c_char * 4)  # Currency ID
    ]

class DiscountField(BaseField):
    """Participant fund discount"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorRange', c_char * 1),  # Investor range
        ('InvestorID', c_char * 13),  # Investor ID
        ('Discount', c_double)  # Fund discount ratio
    ]

class QryTransferBankField(BaseField):
    """Query transfer bank"""
    _fields_ = [
        ('BankID', c_char * 4),  # Bank ID
        ('BankBrchID', c_char * 5)  # Bank branch ID
    ]

class TransferBankField(BaseField):
    """Transfer bank"""
    _fields_ = [
        ('BankID', c_char * 4),  # Bank ID
        ('BankBrchID', c_char * 5),  # Bank branch ID
        ('BankName', c_char * 101),  # Bank name
        ('IsActive', c_int)  # Whether active
    ]

class QryInvestorPositionDetailField(BaseField):
    """Query investor position detail"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class InvestorPositionDetailField(BaseField):
    """Investor position detail"""
    _fields_ = [
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('Direction', c_char * 1),  # Direction
        ('OpenDate', c_char * 9),  # Open date
        ('TradeID', c_char * 21),  # Trade ID
        ('Volume', c_int),  # Volume
        ('OpenPrice', c_double),  # Open price
        ('TradingDay', c_char * 9),  # Trading day
        ('SettlementID', c_int),  # Settlement ID
        ('TradeType', c_char * 1),  # Trade type
        ('CombInstrumentID', c_char * 31),  # Combined instrument ID
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('CloseProfitByDate', c_double),  # Close profit, marked to market daily
        ('CloseProfitByTrade', c_double),  # Close profit, trade by trade
        ('PositionProfitByDate', c_double),  # Position profit, marked to market daily
        ('PositionProfitByTrade', c_double),  # Position profit, trade by trade
        ('Margin', c_double),  # Investor margin
        ('ExchMargin', c_double),  # Exchange margin
        ('MarginRateByMoney', c_double),  # Margin rate (by amount)
        ('MarginRateByVolume', c_double),  # Margin rate (by volume)
        ('LastSettlementPrice', c_double),  # Previous settlement price
        ('SettlementPrice', c_double),  # Settlement price
        ('CloseVolume', c_int),  # Close volume
        ('CloseAmount', c_double),  # Close amount
        ('TimeFirstVolume', c_int),  # Number of lots closed in time order (DCE only)
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]
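
# Sketch: recompute the daily mark-to-market P&L of one position detail. The
# contract multiplier is not carried in this struct, so it is a hypothetical
# parameter here; the sign convention assumes Direction b'0' means buy.
def _example_position_profit(detail, multiplier):
    sign = 1.0 if detail.Direction == b'0' else -1.0
    return (sign * (detail.SettlementPrice - detail.LastSettlementPrice)
            * detail.Volume * multiplier)
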
class TradingAccountPasswordField(BaseField):
    """Trading account password"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('AccountID', c_char * 13),  # Investor account ID
        ('Password', c_char * 41),  # Password
        ('CurrencyID', c_char * 4)  # Currency ID
    ]

class MDTraderOfferField(BaseField):
    """Exchange market-data feed seat"""
    _fields_ = [
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('TraderID', c_char * 21),  # Trader ID at the exchange
        ('ParticipantID', c_char * 11),  # Participant ID
        ('Password', c_char * 41),  # Password
        ('InstallID', c_int),  # Install ID
        ('OrderLocalID', c_char * 13),  # Local order ID
        ('TraderConnectStatus', c_char * 1),  # Trader connect status
        ('ConnectRequestDate', c_char * 9),  # Date the connect request was sent
        ('ConnectRequestTime', c_char * 9),  # Time the connect request was sent
        ('LastReportDate', c_char * 9),  # Last report date
        ('LastReportTime', c_char * 9),  # Last report time
        ('ConnectDate', c_char * 9),  # Date the connection completed
        ('ConnectTime', c_char * 9),  # Time the connection completed
        ('StartDate', c_char * 9),  # Start date
        ('StartTime', c_char * 9),  # Start time
        ('TradingDay', c_char * 9),  # Trading day
        ('BrokerID', c_char * 11),  # Broker ID
        ('MaxTradeID', c_char * 21),  # Max trade ID on this seat
        ('MaxOrderMessageReference', c_char * 7)  # Max order message reference on this seat
    ]

class QryMDTraderOfferField(BaseField):
    """Query market-data feed seat"""
    _fields_ = [
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParticipantID', c_char * 11),  # Participant ID
        ('TraderID', c_char * 21)  # Trader ID at the exchange
    ]

class QryNoticeField(BaseField):
    """Query notices"""
    _fields_ = [
        ('BrokerID', c_char * 11)  # Broker ID
    ]

class NoticeField(BaseField):
    """Notice"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('Content', c_char * 501),  # Message content
        ('SequenceLabel', c_char * 2)  # Sequence label of the broker notice
    ]

class UserRightField(BaseField):
    """User right"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('UserID', c_char * 16),  # User ID
        ('UserRightType', c_char * 1),  # User right type
        ('IsForbidden', c_int)  # Whether forbidden
    ]

class QrySettlementInfoConfirmField(BaseField):
    """Query settlement info confirmation"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('AccountID', c_char * 13),  # Investor account ID
        ('CurrencyID', c_char * 4)  # Currency ID
    ]

class LoadSettlementInfoField(BaseField):
    """Load settlement info"""
    _fields_ = [
        ('BrokerID', c_char * 11)  # Broker ID
    ]

class BrokerWithdrawAlgorithmField(BaseField):
    """Broker withdrawable-fund algorithm"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('WithdrawAlgorithm', c_char * 1),  # Withdraw algorithm
        ('UsingRatio', c_double),  # Fund usage ratio
        ('IncludeCloseProfit', c_char * 1),  # Whether withdrawable funds include close profit
        ('AllWithoutTrade', c_char * 1),  # Whether clients with no position and no trades today are capped by the withdraw ratio
        ('AvailIncludeCloseProfit', c_char * 1),  # Whether available funds include close profit
        ('IsBrokerUserEvent', c_int),  # Whether broker user events are enabled
        ('CurrencyID', c_char * 4),  # Currency ID
        ('FundMortgageRatio', c_double),  # Fund mortgage ratio
        ('BalanceAlgorithm', c_char * 1)  # Balance algorithm
    ]

class TradingAccountPasswordUpdateV1Field(BaseField):
    """Trading account password update (v1)"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('OldPassword', c_char * 41),  # Old password
        ('NewPassword', c_char * 41)  # New password
    ]

class TradingAccountPasswordUpdateField(BaseField):
    """Trading account password update"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('AccountID', c_char * 13),  # Investor account ID
        ('OldPassword', c_char * 41),  # Old password
        ('NewPassword', c_char * 41),  # New password
        ('CurrencyID', c_char * 4)  # Currency ID
    ]

class QryCombinationLegField(BaseField):
    """Query combination instrument legs"""
    _fields_ = [
        ('CombInstrumentID', c_char * 31),  # Combined instrument ID
        ('LegID', c_int),  # Leg ID
        ('LegInstrumentID', c_char * 31)  # Leg instrument ID
    ]

class QrySyncStatusField(BaseField):
    """Query data sync status"""
    _fields_ = [
        ('TradingDay', c_char * 9)  # Trading day
    ]

class CombinationLegField(BaseField):
    """Single leg of a combination instrument"""
    _fields_ = [
        ('CombInstrumentID', c_char * 31),  # Combined instrument ID
        ('LegID', c_int),  # Leg ID
        ('LegInstrumentID', c_char * 31),  # Leg instrument ID
        ('Direction', c_char * 1),  # Direction
        ('LegMultiple', c_int),  # Leg multiple
        ('ImplyLevel', c_int)  # Implied derivation level
    ]
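
# Sketch: expand a combination fill into per-leg volume deltas using
# CombinationLegField definitions. The sign convention (b'0' = buy) is an
# assumption mirroring the Direction flag above.
def _example_leg_volumes(legs, comb_volume):
    deltas = {}
    for leg in legs:  # iterable of CombinationLegField
        sign = 1 if leg.Direction == b'0' else -1
        deltas[leg.LegInstrumentID] = sign * leg.LegMultiple * comb_volume
    return deltas
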
class SyncStatusField(BaseField):
    """Data sync status"""
    _fields_ = [
        ('TradingDay', c_char * 9),  # Trading day
        ('DataSyncStatus', c_char * 1)  # Data sync status
    ]

class QryLinkManField(BaseField):
    """Query contacts"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13)  # Investor ID
    ]

class LinkManField(BaseField):
    """Contact"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('PersonType', c_char * 1),  # Contact type
        ('IdentifiedCardType', c_char * 1),  # ID document type
        ('IdentifiedCardNo', c_char * 51),  # ID document number
        ('PersonName', c_char * 81),  # Name
        ('Telephone', c_char * 41),  # Telephone
        ('Address', c_char * 101),  # Address
        ('ZipCode', c_char * 7),  # Zip code
        ('Priority', c_int),  # Priority
        ('UOAZipCode', c_char * 11),  # Zip code used at account opening
        ('PersonFullName', c_char * 101)  # Full name
    ]

class QryBrokerUserEventField(BaseField):
    """Query broker user events"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('UserID', c_char * 16),  # User ID
        ('UserEventType', c_char * 1)  # User event type
    ]

class BrokerUserEventField(BaseField):
    """Broker user event"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('UserID', c_char * 16),  # User ID
        ('UserEventType', c_char * 1),  # User event type
        ('EventSequenceNo', c_int),  # User event sequence number
        ('EventDate', c_char * 9),  # Event date
        ('EventTime', c_char * 9),  # Event time
        ('UserEventInfo', c_char * 1025),  # User event info
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31)  # Instrument ID
    ]

class QryContractBankField(BaseField):
    """Query contracted banks (request)"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('BankID', c_char * 4),  # Bank ID
        ('BankBrchID', c_char * 5)  # Bank branch ID
    ]

class ContractBankField(BaseField):
    """Contracted bank (response)"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('BankID', c_char * 4),  # Bank ID
        ('BankBrchID', c_char * 5),  # Bank branch ID
        ('BankName', c_char * 101)  # Bank name
    ]

class InvestorPositionCombineDetailField(BaseField):
    """Investor combined position detail"""
    _fields_ = [
        ('TradingDay', c_char * 9),  # Trading day
        ('OpenDate', c_char * 9),  # Open date
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('SettlementID', c_int),  # Settlement ID
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('ComTradeID', c_char * 21),  # Combination trade ID
        ('TradeID', c_char * 21),  # Match ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('HedgeFlag', c_char * 1),  # Hedge flag
        ('Direction', c_char * 1),  # Direction
        ('TotalAmt', c_int),  # Position volume
        ('Margin', c_double),  # Investor margin
        ('ExchMargin', c_double),  # Exchange margin
        ('MarginRateByMoney', c_double),  # Margin rate (by amount)
        ('MarginRateByVolume', c_double),  # Margin rate (by volume)
        ('LegID', c_int),  # Leg ID
        ('LegMultiple', c_int),  # Leg multiple
        ('CombInstrumentID', c_char * 31),  # Combined-position instrument ID
        ('TradeGroupID', c_int),  # Trade group ID
        ('InvestUnitID', c_char * 17)  # Investment unit ID
    ]

class ParkedOrderField(BaseField):
    """Parked order"""
    _fields_ = [
        ('BrokerID', c_char * 11),  # Broker ID
        ('InvestorID', c_char * 13),  # Investor ID
        ('InstrumentID', c_char * 31),  # Instrument ID
        ('OrderRef', c_char * 13),  # Order reference
        ('UserID', c_char * 16),  # User ID
        ('OrderPriceType', c_char * 1),  # Order price type
        ('Direction', c_char * 1),  # Direction
        ('CombOffsetFlag', c_char * 5),  # Combined offset flag
        ('CombHedgeFlag', c_char * 5),  # Combined hedge flag
        ('LimitPrice', c_double),  # Price
        ('VolumeTotalOriginal', c_int),  # Volume
        ('TimeCondition', c_char * 1),  # Time condition
        ('GTDDate', c_char * 9),  # GTD date
        ('VolumeCondition', c_char * 1),  # Volume condition
        ('MinVolume', c_int),  # Minimum volume
        ('ContingentCondition', c_char * 1),  # Contingent condition
        ('StopPrice', c_double),  # Stop price
        ('ForceCloseReason', c_char * 1),  # Force-close reason
        ('IsAutoSuspend', c_int),  # Auto-suspend flag
        ('BusinessUnit', c_char * 21),  # Business unit
        ('RequestID', c_int),  # Request ID
        ('UserForceClose', c_int),  # User force-close flag
        ('ExchangeID', c_char * 9),  # Exchange ID
        ('ParkedOrderID', c_char * 13),  # Parked order ID
        ('UserType', c_char * 1),  # User type
        ('Status', c_char * 1),  # Parked order status
        ('ErrorID', c_int),  # Error ID
        ('ErrorMsg', c_char * 81),  # Error message
        ('IsSwapOrder', c_int),  # Swap order flag
        ('AccountID', c_char * 13),  # Account ID
        ('CurrencyID', c_char * 4),  # Currency ID
        ('ClientID', c_char * 11),  # Client trading code
        ('InvestUnitID', c_char * 17),  # Investment unit ID
        ('IPAddress', c_char * 16),  # IP address
        ('MacAddress', c_char * 21)  # MAC address
    ]
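
# Sketch: a parked (pre-lodged) limit order that the broker system submits
# once the session opens. The one-char flag literals are assumptions about
# the API's constant section, which is not shown here.
def _example_parked_order():
    po = ParkedOrderField()
    po.BrokerID = b'9999'
    po.InvestorID = b'100001'
    po.InstrumentID = b'IF2312'
    po.OrderRef = b'1'
    po.Direction = b'0'            # assumed: buy
    po.CombOffsetFlag = b'0'       # assumed: open
    po.CombHedgeFlag = b'1'        # assumed: speculation
    po.LimitPrice = 3900.0
    po.VolumeTotalOriginal = 1
    po.TimeCondition = b'3'        # assumed: good for day
    po.VolumeCondition = b'1'      # assumed: any volume
    po.ContingentCondition = b'1'  # assumed: trigger immediately
    po.ExchangeID = b'CFFEX'
    return po
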
class ParkedOrderActionField(BaseField):
"""Input parked order action"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('OrderActionRef', c_int),  # Order action reference
('OrderRef', c_char * 13),  # Order reference
('RequestID', c_int),  # Request ID
('FrontID', c_int),  # Front ID
('SessionID', c_int),  # Session ID
('ExchangeID', c_char * 9),  # Exchange ID
('OrderSysID', c_char * 21),  # Order system ID
('ActionFlag', c_char * 1),  # Action flag
('LimitPrice', c_double),  # Price
('VolumeChange', c_int),  # Volume change
('UserID', c_char * 16),  # User ID
('InstrumentID', c_char * 31),  # Instrument ID
('ParkedOrderActionID', c_char * 13),  # Parked order action ID
('UserType', c_char * 1),  # User type
('Status', c_char * 1),  # Parked order action status
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('InvestUnitID', c_char * 17),  # Investment unit ID
('IPAddress', c_char * 16),  # IP address
('MacAddress', c_char * 21)  # MAC address
]
class QryParkedOrderField(BaseField):
"""Query parked orders"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('InstrumentID', c_char * 31),  # Instrument ID
('ExchangeID', c_char * 9),  # Exchange ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class QryParkedOrderActionField(BaseField):
"""Query parked order actions"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('InstrumentID', c_char * 31),  # Instrument ID
('ExchangeID', c_char * 9),  # Exchange ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class RemoveParkedOrderField(BaseField):
"""Remove a parked order"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('ParkedOrderID', c_char * 13),  # Parked order ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class RemoveParkedOrderActionField(BaseField):
"""Remove a parked order action"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('ParkedOrderActionID', c_char * 13),  # Parked order action ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class InvestorWithdrawAlgorithmField(BaseField):
"""Broker withdrawal algorithm table"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorRange', c_char * 1),  # Investor range
('InvestorID', c_char * 13),  # Investor ID
('UsingRatio', c_double),  # Withdrawable funds ratio
('CurrencyID', c_char * 4),  # Currency ID
('FundMortgageRatio', c_double)  # Fund mortgage ratio
]
class QryInvestorPositionCombineDetailField(BaseField):
"""Query combined position detail"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('CombInstrumentID', c_char * 31),  # Combined instrument ID
('ExchangeID', c_char * 9),  # Exchange ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class MarketDataAveragePriceField(BaseField):
"""Average trade price"""
_fields_ = [
('AveragePrice', c_double)  # ///Average price of the day
]
class VerifyInvestorPasswordField(BaseField):
"""Verify investor password"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('Password', c_char * 41)  # Password
]
class UserIPField(BaseField):
"""User IP"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('UserID', c_char * 16),  # User ID
('IPAddress', c_char * 16),  # IP address
('IPMask', c_char * 16),  # IP mask
('MacAddress', c_char * 21)  # MAC address
]
class TradingNoticeInfoField(BaseField):
"""User event notice info"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('SendTime', c_char * 9),  # Send time
('FieldContent', c_char * 501),  # Message body
('SequenceSeries', c_short),  # Sequence series number
('SequenceNo', c_int),  # Sequence number
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class TradingNoticeField(BaseField):
"""User event notice"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorRange', c_char * 1),  # Investor range
('InvestorID', c_char * 13),  # Investor ID
('SequenceSeries', c_short),  # Sequence series number
('UserID', c_char * 16),  # User ID
('SendTime', c_char * 9),  # Send time
('SequenceNo', c_int),  # Sequence number
('FieldContent', c_char * 501),  # Message body
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class QryTradingNoticeField(BaseField):
"""Query trading event notices"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class QryErrOrderField(BaseField):
"""Query erroneous orders"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13)  # Investor ID
]
class ErrOrderField(BaseField):
"""Erroneous order"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('InstrumentID', c_char * 31),  # Instrument ID
('OrderRef', c_char * 13),  # Order reference
('UserID', c_char * 16),  # User ID
('OrderPriceType', c_char * 1),  # Order price type
('Direction', c_char * 1),  # Direction
('CombOffsetFlag', c_char * 5),  # Combined offset flag
('CombHedgeFlag', c_char * 5),  # Combined hedge flag
('LimitPrice', c_double),  # Price
('VolumeTotalOriginal', c_int),  # Volume
('TimeCondition', c_char * 1),  # Time condition
('GTDDate', c_char * 9),  # GTD date
('VolumeCondition', c_char * 1),  # Volume condition
('MinVolume', c_int),  # Minimum volume
('ContingentCondition', c_char * 1),  # Contingent condition
('StopPrice', c_double),  # Stop price
('ForceCloseReason', c_char * 1),  # Force close reason
('IsAutoSuspend', c_int),  # Auto-suspend flag
('BusinessUnit', c_char * 21),  # Business unit
('RequestID', c_int),  # Request ID
('UserForceClose', c_int),  # User force-close flag
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('IsSwapOrder', c_int),  # Swap order flag
('ExchangeID', c_char * 9),  # Exchange ID
('InvestUnitID', c_char * 17),  # Investment unit ID
('AccountID', c_char * 13),  # Account ID
('CurrencyID', c_char * 4),  # Currency ID
('ClientID', c_char * 11),  # Client (trading) code
('IPAddress', c_char * 16),  # IP address
('MacAddress', c_char * 21)  # MAC address
]
class ErrorConditionalOrderField(BaseField):
"""Error conditional order"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('InstrumentID', c_char * 31),  # Instrument ID
('OrderRef', c_char * 13),  # Order reference
('UserID', c_char * 16),  # User ID
('OrderPriceType', c_char * 1),  # Order price type
('Direction', c_char * 1),  # Direction
('CombOffsetFlag', c_char * 5),  # Combined offset flag
('CombHedgeFlag', c_char * 5),  # Combined hedge flag
('LimitPrice', c_double),  # Price
('VolumeTotalOriginal', c_int),  # Volume
('TimeCondition', c_char * 1),  # Time condition
('GTDDate', c_char * 9),  # GTD date
('VolumeCondition', c_char * 1),  # Volume condition
('MinVolume', c_int),  # Minimum volume
('ContingentCondition', c_char * 1),  # Contingent condition
('StopPrice', c_double),  # Stop price
('ForceCloseReason', c_char * 1),  # Force close reason
('IsAutoSuspend', c_int),  # Auto-suspend flag
('BusinessUnit', c_char * 21),  # Business unit
('RequestID', c_int),  # Request ID
('OrderLocalID', c_char * 13),  # Local order ID
('ExchangeID', c_char * 9),  # Exchange ID
('ParticipantID', c_char * 11),  # Participant (member) ID
('ClientID', c_char * 11),  # Client ID
('ExchangeInstID', c_char * 31),  # Exchange instrument ID
('TraderID', c_char * 21),  # Exchange trader ID
('InstallID', c_int),  # Install ID
('OrderSubmitStatus', c_char * 1),  # Order submit status
('NotifySequence', c_int),  # Order notify sequence
('TradingDay', c_char * 9),  # Trading day
('SettlementID', c_int),  # Settlement ID
('OrderSysID', c_char * 21),  # Order system ID
('OrderSource', c_char * 1),  # Order source
('OrderStatus', c_char * 1),  # Order status
('OrderType', c_char * 1),  # Order type
('VolumeTraded', c_int),  # Volume traded today
('VolumeTotal', c_int),  # Remaining volume
('InsertDate', c_char * 9),  # Order insert date
('InsertTime', c_char * 9),  # Order insert time
('ActiveTime', c_char * 9),  # Activation time
('SuspendTime', c_char * 9),  # Suspension time
('UpdateTime', c_char * 9),  # Last update time
('CancelTime', c_char * 9),  # Cancellation time
('ActiveTraderID', c_char * 21),  # Exchange trader ID of the last modification
('ClearingPartID', c_char * 11),  # Clearing participant ID
('SequenceNo', c_int),  # Sequence number
('FrontID', c_int),  # Front ID
('SessionID', c_int),  # Session ID
('UserProductInfo', c_char * 11),  # User product info
('StatusMsg', c_char * 81),  # Status message
('UserForceClose', c_int),  # User force-close flag
('ActiveUserID', c_char * 16),  # Acting user ID
('BrokerOrderSeq', c_int),  # Broker order sequence
('RelativeOrderSysID', c_char * 21),  # Related order system ID
('ZCETotalTradedVolume', c_int),  # ZCE total traded volume
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('IsSwapOrder', c_int),  # Swap order flag
('BranchID', c_char * 9),  # Branch ID
('InvestUnitID', c_char * 17),  # Investment unit ID
('AccountID', c_char * 13),  # Account ID
('CurrencyID', c_char * 4),  # Currency ID
('IPAddress', c_char * 16),  # IP address
('MacAddress', c_char * 21)  # MAC address
]
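# The order structs expose their volumes as plain ints, so derived values
# fall out directly; a small sketch added for illustration (interpreting
# VolumeTotalOriginal as the full requested size is an assumption):
def _fill_ratio(order):
    """Fraction of the original order volume already traded (0.0 if unset)."""
    total = order.VolumeTotalOriginal
    return order.VolumeTraded / total if total else 0.0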
class QryErrOrderActionField(BaseField):
"""Query erroneous order actions"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13)  # Investor ID
]
class ErrOrderActionField(BaseField):
"""Erroneous order action"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('OrderActionRef', c_int),  # Order action reference
('OrderRef', c_char * 13),  # Order reference
('RequestID', c_int),  # Request ID
('FrontID', c_int),  # Front ID
('SessionID', c_int),  # Session ID
('ExchangeID', c_char * 9),  # Exchange ID
('OrderSysID', c_char * 21),  # Order system ID
('ActionFlag', c_char * 1),  # Action flag
('LimitPrice', c_double),  # Price
('VolumeChange', c_int),  # Volume change
('ActionDate', c_char * 9),  # Action date
('ActionTime', c_char * 9),  # Action time
('TraderID', c_char * 21),  # Exchange trader ID
('InstallID', c_int),  # Install ID
('OrderLocalID', c_char * 13),  # Local order ID
('ActionLocalID', c_char * 13),  # Local action ID
('ParticipantID', c_char * 11),  # Participant (member) ID
('ClientID', c_char * 11),  # Client ID
('BusinessUnit', c_char * 21),  # Business unit
('OrderActionStatus', c_char * 1),  # Order action status
('UserID', c_char * 16),  # User ID
('StatusMsg', c_char * 81),  # Status message
('InstrumentID', c_char * 31),  # Instrument ID
('BranchID', c_char * 9),  # Branch ID
('InvestUnitID', c_char * 17),  # Investment unit ID
('IPAddress', c_char * 16),  # IP address
('MacAddress', c_char * 21),  # MAC address
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81)  # Error message
]
class QryExchangeSequenceField(BaseField):
"""Query exchange status"""
_fields_ = [
('ExchangeID', c_char * 9)  # ///Exchange ID
]
class ExchangeSequenceField(BaseField):
"""Exchange status"""
_fields_ = [
('ExchangeID', c_char * 9),  # ///Exchange ID
('SequenceNo', c_int),  # Sequence number
('MarketStatus', c_char * 1)  # Instrument trading status
]
class QueryMaxOrderVolumeWithPriceField(BaseField):
"""Query the maximum order volume at a given price"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('InstrumentID', c_char * 31),  # Instrument ID
('Direction', c_char * 1),  # Direction
('OffsetFlag', c_char * 1),  # Offset flag
('HedgeFlag', c_char * 1),  # Hedge flag
('MaxVolume', c_int),  # Maximum allowed order volume
('Price', c_double),  # Order price
('ExchangeID', c_char * 9),  # Exchange ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class QryBrokerTradingParamsField(BaseField):
"""Query broker trading parameters"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('CurrencyID', c_char * 4),  # Currency ID
('AccountID', c_char * 13)  # Investor account ID
]
class BrokerTradingParamsField(BaseField):
"""Broker trading parameters"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('MarginPriceType', c_char * 1),  # Margin price type
('Algorithm', c_char * 1),  # Profit/loss algorithm
('AvailIncludeCloseProfit', c_char * 1),  # Whether available funds include close profit
('CurrencyID', c_char * 4),  # Currency ID
('OptionRoyaltyPriceType', c_char * 1),  # Option premium price type
('AccountID', c_char * 13)  # Investor account ID
]
class QryBrokerTradingAlgosField(BaseField):
"""Query broker trading algorithms"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('ExchangeID', c_char * 9),  # Exchange ID
('InstrumentID', c_char * 31)  # Instrument ID
]
class BrokerTradingAlgosField(BaseField):
"""Broker trading algorithms"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('ExchangeID', c_char * 9),  # Exchange ID
('InstrumentID', c_char * 31),  # Instrument ID
('HandlePositionAlgoID', c_char * 1),  # Position handling algorithm ID
('FindMarginRateAlgoID', c_char * 1),  # Margin-rate lookup algorithm ID
('HandleTradingAccountAlgoID', c_char * 1)  # Account handling algorithm ID
]
class QueryBrokerDepositField(BaseField):
"""Query broker deposit"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('ExchangeID', c_char * 9)  # Exchange ID
]
class BrokerDepositField(BaseField):
"""Broker deposit"""
_fields_ = [
('TradingDay', c_char * 9),  # ///Trading day
('BrokerID', c_char * 11),  # Broker ID
('ParticipantID', c_char * 11),  # Participant (member) ID
('ExchangeID', c_char * 9),  # Exchange ID
('PreBalance', c_double),  # Previous settlement reserve
('CurrMargin', c_double),  # Current total margin
('CloseProfit', c_double),  # Close profit
('Balance', c_double),  # Futures settlement reserve
('Deposit', c_double),  # Deposit amount
('Withdraw', c_double),  # Withdrawal amount
('Available', c_double),  # Withdrawable funds
('Reserve', c_double),  # Basic reserve
('FrozenMargin', c_double)  # Frozen margin
]
class QryCFMMCBrokerKeyField(BaseField):
"""Query CFMMC (margin monitoring system) broker key"""
_fields_ = [
('BrokerID', c_char * 11)  # ///Broker ID
]
class CFMMCBrokerKeyField(BaseField):
"""CFMMC (margin monitoring system) broker key"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('ParticipantID', c_char * 11),  # Unified broker code
('CreateDate', c_char * 9),  # Key creation date
('CreateTime', c_char * 9),  # Key creation time
('KeyID', c_int),  # Key ID
('CurrentKey', c_char * 21),  # Dynamic key
('KeyKind', c_char * 1)  # Dynamic key kind
]
class CFMMCTradingAccountKeyField(BaseField):
"""CFMMC broker trading account key"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('ParticipantID', c_char * 11),  # Unified broker code
('AccountID', c_char * 13),  # Investor account ID
('KeyID', c_int),  # Key ID
('CurrentKey', c_char * 21)  # Dynamic key
]
class QryCFMMCTradingAccountKeyField(BaseField):
"""Query the CFMMC broker trading account key"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13)  # Investor ID
]
class BrokerUserOTPParamField(BaseField):
"""User OTP (one-time password) token parameters"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('UserID', c_char * 16),  # User ID
('OTPVendorsID', c_char * 2),  # OTP vendor ID
('SerialNumber', c_char * 17),  # OTP serial number
('AuthKey', c_char * 41),  # Token key
('LastDrift', c_int),  # Last drift value
('LastSuccess', c_int),  # Last success value
('OTPType', c_char * 1)  # OTP type
]
class ManualSyncBrokerUserOTPField(BaseField):
"""Manually synchronize a user's OTP token"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('UserID', c_char * 16),  # User ID
('OTPType', c_char * 1),  # OTP type
('FirstOTP', c_char * 41),  # First one-time password
('SecondOTP', c_char * 41)  # Second one-time password
]
class CommRateModelField(BaseField):
"""Investor commission rate model"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('CommModelID', c_char * 13),  # Commission rate model ID
('CommModelName', c_char * 161)  # Model name
]
class QryCommRateModelField(BaseField):
"""Query investor commission rate models"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('CommModelID', c_char * 13)  # Commission rate model ID
]
class MarginModelField(BaseField):
"""Investor margin rate model"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('MarginModelID', c_char * 13),  # Margin rate model ID
('MarginModelName', c_char * 161)  # Model name
]
class QryMarginModelField(BaseField):
"""Query investor margin rate models"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('MarginModelID', c_char * 13)  # Margin rate model ID
]
class EWarrantOffsetField(BaseField):
"""E-warrant offset info"""
_fields_ = [
('TradingDay', c_char * 9),  # ///Trading day
('BrokerID', c_char * 11),  # Broker ID
('InvestorID', c_char * 13),  # Investor ID
('ExchangeID', c_char * 9),  # Exchange ID
('InstrumentID', c_char * 31),  # Instrument ID
('Direction', c_char * 1),  # Direction
('HedgeFlag', c_char * 1),  # Hedge flag
('Volume', c_int),  # Volume
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class QryEWarrantOffsetField(BaseField):
"""Query e-warrant offset info"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('ExchangeID', c_char * 9),  # Exchange ID
('InstrumentID', c_char * 31),  # Instrument ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class QryInvestorProductGroupMarginField(BaseField):
"""Query investor product/cross-product margin"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('ProductGroupID', c_char * 31),  # Product/cross-product group ID
('HedgeFlag', c_char * 1),  # Hedge flag
('ExchangeID', c_char * 9),  # Exchange ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class InvestorProductGroupMarginField(BaseField):
"""Investor product/cross-product margin"""
_fields_ = [
('ProductGroupID', c_char * 31),  # ///Product/cross-product group ID
('BrokerID', c_char * 11),  # Broker ID
('InvestorID', c_char * 13),  # Investor ID
('TradingDay', c_char * 9),  # Trading day
('SettlementID', c_int),  # Settlement ID
('FrozenMargin', c_double),  # Frozen margin
('LongFrozenMargin', c_double),  # Long frozen margin
('ShortFrozenMargin', c_double),  # Short frozen margin
('UseMargin', c_double),  # Used margin
('LongUseMargin', c_double),  # Long margin
('ShortUseMargin', c_double),  # Short margin
('ExchMargin', c_double),  # Exchange margin
('LongExchMargin', c_double),  # Exchange long margin
('ShortExchMargin', c_double),  # Exchange short margin
('CloseProfit', c_double),  # Close profit
('FrozenCommission', c_double),  # Frozen commission
('Commission', c_double),  # Commission
('FrozenCash', c_double),  # Frozen cash
('CashIn', c_double),  # Cash in
('PositionProfit', c_double),  # Position profit
('OffsetAmount', c_double),  # Total offset amount
('LongOffsetAmount', c_double),  # Long total offset amount
('ShortOffsetAmount', c_double),  # Short total offset amount
('ExchOffsetAmount', c_double),  # Exchange total offset amount
('LongExchOffsetAmount', c_double),  # Exchange long total offset amount
('ShortExchOffsetAmount', c_double),  # Exchange short total offset amount
('HedgeFlag', c_char * 1),  # Hedge flag
('ExchangeID', c_char * 9),  # Exchange ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class QueryCFMMCTradingAccountTokenField(BaseField):
"""Query CFMMC user token"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('InvestorID', c_char * 13),  # Investor ID
('InvestUnitID', c_char * 17)  # Investment unit ID
]
class CFMMCTradingAccountTokenField(BaseField):
"""CFMMC user token"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('ParticipantID', c_char * 11),  # Unified broker code
('AccountID', c_char * 13),  # Investor account ID
('KeyID', c_int),  # Key ID
('Token', c_char * 21)  # Dynamic token
]
class QryProductGroupField(BaseField):
"""Query product groups"""
_fields_ = [
('ProductID', c_char * 31),  # ///Product ID
('ExchangeID', c_char * 9)  # Exchange ID
]
class ProductGroupField(BaseField):
"""Product group for investor product/cross-product margin"""
_fields_ = [
('ProductID', c_char * 31),  # ///Product ID
('ExchangeID', c_char * 9),  # Exchange ID
('ProductGroupID', c_char * 31)  # Product group ID
]
class BulletinField(BaseField):
"""Exchange bulletin"""
_fields_ = [
('ExchangeID', c_char * 9),  # ///Exchange ID
('TradingDay', c_char * 9),  # Trading day
('BulletinID', c_int),  # Bulletin ID
('SequenceNo', c_int),  # Sequence number
('NewsType', c_char * 3),  # Bulletin type
('NewsUrgency', c_char * 1),  # Urgency level
('SendTime', c_char * 9),  # Send time
('Abstract', c_char * 81),  # Abstract
('ComeFrom', c_char * 21),  # Source
('Content', c_char * 501),  # Message body
('URLLink', c_char * 201),  # Web URL
('MarketID', c_char * 31)  # Market ID
]
class QryBulletinField(BaseField):
"""Query exchange bulletins"""
_fields_ = [
('ExchangeID', c_char * 9),  # ///Exchange ID
('BulletinID', c_int),  # Bulletin ID
('SequenceNo', c_int),  # Sequence number
('NewsType', c_char * 3),  # Bulletin type
('NewsUrgency', c_char * 1)  # Urgency level
]
class ReqOpenAccountField(BaseField):
"""Bank-futures account-opening request"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('Gender', c_char * 1),  # Gender
('CountryCode', c_char * 21),  # Country code
('CustType', c_char * 1),  # Customer type
('Address', c_char * 101),  # Address
('ZipCode', c_char * 7),  # Zip code
('Telephone', c_char * 41),  # Telephone
('MobilePhone', c_char * 21),  # Mobile phone
('Fax', c_char * 41),  # Fax
('EMail', c_char * 41),  # Email
('MoneyAccountStatus', c_char * 1),  # Money account status
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('InstallID', c_int),  # Install ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('CashExchangeCode', c_char * 1),  # Cash/remittance flag
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('TID', c_int),  # Transaction ID
('UserID', c_char * 16),  # User ID
('LongCustomerName', c_char * 161)  # Long customer name
]
class ReqCancelAccountField(BaseField):
"""Bank-futures account-cancellation request"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('Gender', c_char * 1),  # Gender
('CountryCode', c_char * 21),  # Country code
('CustType', c_char * 1),  # Customer type
('Address', c_char * 101),  # Address
('ZipCode', c_char * 7),  # Zip code
('Telephone', c_char * 41),  # Telephone
('MobilePhone', c_char * 21),  # Mobile phone
('Fax', c_char * 41),  # Fax
('EMail', c_char * 41),  # Email
('MoneyAccountStatus', c_char * 1),  # Money account status
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('InstallID', c_int),  # Install ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('CashExchangeCode', c_char * 1),  # Cash/remittance flag
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('TID', c_int),  # Transaction ID
('UserID', c_char * 16),  # User ID
('LongCustomerName', c_char * 161)  # Long customer name
]
class ReqChangeAccountField(BaseField):
"""Bank account change request"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('Gender', c_char * 1),  # Gender
('CountryCode', c_char * 21),  # Country code
('CustType', c_char * 1),  # Customer type
('Address', c_char * 101),  # Address
('ZipCode', c_char * 7),  # Zip code
('Telephone', c_char * 41),  # Telephone
('MobilePhone', c_char * 21),  # Mobile phone
('Fax', c_char * 41),  # Fax
('EMail', c_char * 41),  # Email
('MoneyAccountStatus', c_char * 1),  # Money account status
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('NewBankAccount', c_char * 41),  # New bank account
('NewBankPassWord', c_char * 41),  # New bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('BankAccType', c_char * 1),  # Bank account type
('InstallID', c_int),  # Install ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('TID', c_int),  # Transaction ID
('Digest', c_char * 36),  # Digest
('LongCustomerName', c_char * 161)  # Long customer name
]
class ReqTransferField(BaseField):
"""Transfer request"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustType', c_char * 1),  # Customer type
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('InstallID', c_int),  # Install ID
('FutureSerial', c_int),  # Futures company serial number
('UserID', c_char * 16),  # User ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('TradeAmount', c_double),  # Transfer amount
('FutureFetchAmount', c_double),  # Fetchable futures amount
('FeePayFlag', c_char * 1),  # Fee payment flag
('CustFee', c_double),  # Fee charged to the customer
('BrokerFee', c_double),  # Fee charged to the futures company
('Message', c_char * 129),  # Message from sender to receiver
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('TransferStatus', c_char * 1),  # Transfer status
('LongCustomerName', c_char * 161)  # Long customer name
]
class RspTransferField(BaseField):
"""Response to a bank-initiated bank-to-futures transfer"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustType', c_char * 1),  # Customer type
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('InstallID', c_int),  # Install ID
('FutureSerial', c_int),  # Futures company serial number
('UserID', c_char * 16),  # User ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('TradeAmount', c_double),  # Transfer amount
('FutureFetchAmount', c_double),  # Fetchable futures amount
('FeePayFlag', c_char * 1),  # Fee payment flag
('CustFee', c_double),  # Fee charged to the customer
('BrokerFee', c_double),  # Fee charged to the futures company
('Message', c_char * 129),  # Message from sender to receiver
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('TransferStatus', c_char * 1),  # Transfer status
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('LongCustomerName', c_char * 161)  # Long customer name
]
class ReqRepealField(BaseField):
"""Repeal (reversal) request"""
_fields_ = [
('RepealTimeInterval', c_int),  # ///Repeal time interval
('RepealedTimes', c_int),  # Number of repeals so far
('BankRepealFlag', c_char * 1),  # Bank repeal flag
('BrokerRepealFlag', c_char * 1),  # Broker repeal flag
('PlateRepealSerial', c_int),  # Repealed platform serial number
('BankRepealSerial', c_char * 13),  # Repealed bank serial number
('FutureRepealSerial', c_int),  # Repealed futures serial number
('TradeCode', c_char * 7),  # Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustType', c_char * 1),  # Customer type
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('InstallID', c_int),  # Install ID
('FutureSerial', c_int),  # Futures company serial number
('UserID', c_char * 16),  # User ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('TradeAmount', c_double),  # Transfer amount
('FutureFetchAmount', c_double),  # Fetchable futures amount
('FeePayFlag', c_char * 1),  # Fee payment flag
('CustFee', c_double),  # Fee charged to the customer
('BrokerFee', c_double),  # Fee charged to the futures company
('Message', c_char * 129),  # Message from sender to receiver
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('TransferStatus', c_char * 1),  # Transfer status
('LongCustomerName', c_char * 161)  # Long customer name
]
class RspRepealField(BaseField):
"""Repeal (reversal) response"""
_fields_ = [
('RepealTimeInterval', c_int),  # ///Repeal time interval
('RepealedTimes', c_int),  # Number of repeals so far
('BankRepealFlag', c_char * 1),  # Bank repeal flag
('BrokerRepealFlag', c_char * 1),  # Broker repeal flag
('PlateRepealSerial', c_int),  # Repealed platform serial number
('BankRepealSerial', c_char * 13),  # Repealed bank serial number
('FutureRepealSerial', c_int),  # Repealed futures serial number
('TradeCode', c_char * 7),  # Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustType', c_char * 1),  # Customer type
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('InstallID', c_int),  # Install ID
('FutureSerial', c_int),  # Futures company serial number
('UserID', c_char * 16),  # User ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('TradeAmount', c_double),  # Transfer amount
('FutureFetchAmount', c_double),  # Fetchable futures amount
('FeePayFlag', c_char * 1),  # Fee payment flag
('CustFee', c_double),  # Fee charged to the customer
('BrokerFee', c_double),  # Fee charged to the futures company
('Message', c_char * 129),  # Message from sender to receiver
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('TransferStatus', c_char * 1),  # Transfer status
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('LongCustomerName', c_char * 161)  # Long customer name
]
class ReqQueryAccountField(BaseField):
"""Account info query request"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustType', c_char * 1),  # Customer type
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('FutureSerial', c_int),  # Futures company serial number
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('LongCustomerName', c_char * 161)  # Long customer name
]
class RspQueryAccountField(BaseField):
"""Account info query response"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustType', c_char * 1),  # Customer type
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('FutureSerial', c_int),  # Futures company serial number
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('BankUseAmount', c_double),  # Bank available amount
('BankFetchAmount', c_double),  # Bank fetchable amount
('LongCustomerName', c_char * 161)  # Long customer name
]
class FutureSignIOField(BaseField):
"""Futures company sign-in/sign-out"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('Digest', c_char * 36),  # Digest
('CurrencyID', c_char * 4),  # Currency ID
('DeviceID', c_char * 3),  # Channel (device) flag
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int)  # Transaction ID
]
class RspFutureSignInField(BaseField):
"""Futures company sign-in response"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('Digest', c_char * 36),  # Digest
('CurrencyID', c_char * 4),  # Currency ID
('DeviceID', c_char * 3),  # Channel (device) flag
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('PinKey', c_char * 129),  # PIN key
('MacKey', c_char * 129)  # MAC key
]
class ReqFutureSignOutField(BaseField):
"""Futures company sign-out request"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('Digest', c_char * 36),  # Digest
('CurrencyID', c_char * 4),  # Currency ID
('DeviceID', c_char * 3),  # Channel (device) flag
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int)  # Transaction ID
]
class RspFutureSignOutField(BaseField):
"""Futures company sign-out response"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('Digest', c_char * 36),  # Digest
('CurrencyID', c_char * 4),  # Currency ID
('DeviceID', c_char * 3),  # Channel (device) flag
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81)  # Error message
]
class ReqQueryTradeResultBySerialField(BaseField):
"""Request to query the trade result for a given serial number"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('Reference', c_int),  # Serial (reference) number
('RefrenceIssureType', c_char * 1),  # Institution type of the serial number's issuer
('RefrenceIssure', c_char * 36),  # Institution code of the serial number's issuer
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustType', c_char * 1),  # Customer type
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('CurrencyID', c_char * 4),  # Currency ID
('TradeAmount', c_double),  # Transfer amount
('Digest', c_char * 36),  # Digest
('LongCustomerName', c_char * 161)  # Long customer name
]
class RspQueryTradeResultBySerialField(BaseField):
"""Response to a trade-result query by serial number"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('Reference', c_int),  # Serial (reference) number
('RefrenceIssureType', c_char * 1),  # Institution type of the serial number's issuer
('RefrenceIssure', c_char * 36),  # Institution code of the serial number's issuer
('OriginReturnCode', c_char * 7),  # Original return code
('OriginDescrInfoForReturnCode', c_char * 129),  # Original return code description
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('CurrencyID', c_char * 4),  # Currency ID
('TradeAmount', c_double),  # Transfer amount
('Digest', c_char * 36)  # Digest
]
class ReqDayEndFileReadyField(BaseField):
"""Day-end file ready request"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('FileBusinessCode', c_char * 1),  # File business function code
('Digest', c_char * 36)  # Digest
]
class ReturnResultField(BaseField):
"""Return result"""
_fields_ = [
('ReturnCode', c_char * 7),  # ///Return code
('DescrInfoForReturnCode', c_char * 129)  # Return code description
]
class VerifyFuturePasswordField(BaseField):
"""Verify futures password"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('InstallID', c_int),  # Install ID
('TID', c_int),  # Transaction ID
('CurrencyID', c_char * 4)  # Currency ID
]
class VerifyCustInfoField(BaseField):
"""Verify customer info"""
_fields_ = [
('CustomerName', c_char * 51),  # ///Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustType', c_char * 1),  # Customer type
('LongCustomerName', c_char * 161)  # Long customer name
]
class VerifyFuturePasswordAndCustInfoField(BaseField):
"""Verify futures password and customer info"""
_fields_ = [
('CustomerName', c_char * 51),  # ///Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustType', c_char * 1),  # Customer type
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('CurrencyID', c_char * 4),  # Currency ID
('LongCustomerName', c_char * 161)  # Long customer name
]
class DepositResultInformField(BaseField):
"""Deposit result notification"""
_fields_ = [
('DepositSeqNo', c_char * 15),  # ///Deposit/withdrawal serial number, as returned by the bank-futures gateway
('BrokerID', c_char * 11),  # Broker ID
('InvestorID', c_char * 13),  # Investor ID
('Deposit', c_double),  # Deposit amount
('RequestID', c_int),  # Request ID
('ReturnCode', c_char * 7),  # Return code
('DescrInfoForReturnCode', c_char * 129)  # Return code description
]
class ReqSyncKeyField(BaseField):
"""Key-sync request from the trading core to the bank-futures gateway"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('Message', c_char * 129),  # Message from the trading core to the bank-futures gateway
('DeviceID', c_char * 3),  # Channel (device) flag
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int)  # Transaction ID
]
class RspSyncKeyField(BaseField):
"""Key-sync response from the trading core to the bank-futures gateway"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('Message', c_char * 129),  # Message from the trading core to the bank-futures gateway
('DeviceID', c_char * 3),  # Channel (device) flag
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81)  # Error message
]
class NotifyQueryAccountField(BaseField):
"""Account info query notification"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustType', c_char * 1),  # Customer type
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('FutureSerial', c_int),  # Futures company serial number
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('BankUseAmount', c_double),  # Bank available amount
('BankFetchAmount', c_double),  # Bank fetchable amount
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('LongCustomerName', c_char * 161)  # Long customer name
]
class TransferSerialField(BaseField):
"""Bank-futures transfer serial record"""
_fields_ = [
('PlateSerial', c_int),  # ///Platform serial number
('TradeDate', c_char * 9),  # Initiator's trade date
('TradingDay', c_char * 9),  # Trading day
('TradeTime', c_char * 9),  # Trade time
('TradeCode', c_char * 7),  # Trade code
('SessionID', c_int),  # Session ID
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BankAccType', c_char * 1),  # Bank account type
('BankAccount', c_char * 41),  # Bank account
('BankSerial', c_char * 13),  # Bank serial number
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('FutureAccType', c_char * 1),  # Futures account type
('AccountID', c_char * 13),  # Investor account ID
('InvestorID', c_char * 13),  # Investor ID
('FutureSerial', c_int),  # Futures company serial number
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CurrencyID', c_char * 4),  # Currency ID
('TradeAmount', c_double),  # Trade amount
('CustFee', c_double),  # Fee charged to the customer
('BrokerFee', c_double),  # Fee charged to the futures company
('AvailabilityFlag', c_char * 1),  # Validity flag
('OperatorCode', c_char * 17),  # Operator code
('BankNewAccount', c_char * 41),  # New bank account
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81)  # Error message
]
class QryTransferSerialField(BaseField):
"""Query transfer serials"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('AccountID', c_char * 13),  # Investor account ID
('BankID', c_char * 4),  # Bank ID
('CurrencyID', c_char * 4)  # Currency ID
]
class NotifyFutureSignInField(BaseField):
"""Futures company sign-in notification"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('Digest', c_char * 36),  # Digest
('CurrencyID', c_char * 4),  # Currency ID
('DeviceID', c_char * 3),  # Channel (device) flag
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('PinKey', c_char * 129),  # PIN key
('MacKey', c_char * 129)  # MAC key
]
class NotifyFutureSignOutField(BaseField):
"""Futures company sign-out notification"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('Digest', c_char * 36),  # Digest
('CurrencyID', c_char * 4),  # Currency ID
('DeviceID', c_char * 3),  # Channel (device) flag
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81)  # Error message
]
class NotifySyncKeyField(BaseField):
"""Notification of the key-sync result from the trading core to the bank-futures gateway"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('InstallID', c_int),  # Install ID
('UserID', c_char * 16),  # User ID
('Message', c_char * 129),  # Message from the trading core to the bank-futures gateway
('DeviceID', c_char * 3),  # Channel (device) flag
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('OperNo', c_char * 17),  # Teller number
('RequestID', c_int),  # Request ID
('TID', c_int),  # Transaction ID
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81)  # Error message
]
class QryAccountregisterField(BaseField):
"""Query bank-futures registration"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('AccountID', c_char * 13),  # Investor account ID
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('CurrencyID', c_char * 4)  # Currency ID
]
class AccountregisterField(BaseField):
"""Customer account opening/cancellation record"""
_fields_ = [
('TradeDay', c_char * 9),  # ///Trade date
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BankAccount', c_char * 41),  # Bank account
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('AccountID', c_char * 13),  # Investor account ID
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('CustomerName', c_char * 51),  # Customer name
('CurrencyID', c_char * 4),  # Currency ID
('OpenOrDestroy', c_char * 1),  # Open/cancel type
('RegDate', c_char * 9),  # Registration date
('OutDate', c_char * 9),  # Termination date
('TID', c_int),  # Transaction ID
('CustType', c_char * 1),  # Customer type
('BankAccType', c_char * 1),  # Bank account type
('LongCustomerName', c_char * 161)  # Long customer name
]
class OpenAccountField(BaseField):
"""Bank-futures account-opening info"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('Gender', c_char * 1),  # Gender
('CountryCode', c_char * 21),  # Country code
('CustType', c_char * 1),  # Customer type
('Address', c_char * 101),  # Address
('ZipCode', c_char * 7),  # Zip code
('Telephone', c_char * 41),  # Telephone
('MobilePhone', c_char * 21),  # Mobile phone
('Fax', c_char * 41),  # Fax
('EMail', c_char * 41),  # Email
('MoneyAccountStatus', c_char * 1),  # Money account status
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('InstallID', c_int),  # Install ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('CashExchangeCode', c_char * 1),  # Cash/remittance flag
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('TID', c_int),  # Transaction ID
('UserID', c_char * 16),  # User ID
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('LongCustomerName', c_char * 161)  # Long customer name
]
class CancelAccountField(BaseField):
"""Bank-futures account-cancellation info"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('Gender', c_char * 1),  # Gender
('CountryCode', c_char * 21),  # Country code
('CustType', c_char * 1),  # Customer type
('Address', c_char * 101),  # Address
('ZipCode', c_char * 7),  # Zip code
('Telephone', c_char * 41),  # Telephone
('MobilePhone', c_char * 21),  # Mobile phone
('Fax', c_char * 41),  # Fax
('EMail', c_char * 41),  # Email
('MoneyAccountStatus', c_char * 1),  # Money account status
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('InstallID', c_int),  # Install ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('CashExchangeCode', c_char * 1),  # Cash/remittance flag
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('DeviceID', c_char * 3),  # Channel (device) flag
('BankSecuAccType', c_char * 1),  # Futures unit account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankSecuAcc', c_char * 41),  # Futures unit account
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('OperNo', c_char * 17),  # Teller number
('TID', c_int),  # Transaction ID
('UserID', c_char * 16),  # User ID
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('LongCustomerName', c_char * 161)  # Long customer name
]
class ChangeAccountField(BaseField):
"""Bank-futures bank account change info"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 51),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('Gender', c_char * 1),  # Gender
('CountryCode', c_char * 21),  # Country code
('CustType', c_char * 1),  # Customer type
('Address', c_char * 101),  # Address
('ZipCode', c_char * 7),  # Zip code
('Telephone', c_char * 41),  # Telephone
('MobilePhone', c_char * 21),  # Mobile phone
('Fax', c_char * 41),  # Fax
('EMail', c_char * 41),  # Email
('MoneyAccountStatus', c_char * 1),  # Money account status
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('NewBankAccount', c_char * 41),  # New bank account
('NewBankPassWord', c_char * 41),  # New bank password
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('BankAccType', c_char * 1),  # Bank account type
('InstallID', c_int),  # Install ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('BankPwdFlag', c_char * 1),  # Bank password flag
('SecuPwdFlag', c_char * 1),  # Futures password verification flag
('TID', c_int),  # Transaction ID
('Digest', c_char * 36),  # Digest
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81),  # Error message
('LongCustomerName', c_char * 161)  # Long customer name
]
class SecAgentACIDMapField(BaseField):
"""Secondary agent operator bank-futures permissions"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('UserID', c_char * 16),  # User ID
('AccountID', c_char * 13),  # Account ID
('CurrencyID', c_char * 4),  # Currency ID
('BrokerSecAgentID', c_char * 13)  # Overseas intermediary fund account
]
class QrySecAgentACIDMapField(BaseField):
"""Query secondary agent operator bank-futures permissions"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('UserID', c_char * 16),  # User ID
('AccountID', c_char * 13),  # Account ID
('CurrencyID', c_char * 4)  # Currency ID
]
class UserRightsAssignField(BaseField):
"""Disaster-recovery center trading rights"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Application unit ID
('UserID', c_char * 16),  # User ID
('DRIdentityID', c_int)  # Trading center ID
]
class BrokerUserRightAssignField(BaseField):
"""Whether the broker has trading rights at this trading center"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Application unit ID
('DRIdentityID', c_int),  # Trading center ID
('Tradeable', c_int)  # Whether trading is allowed
]
class DRTransferField(BaseField):
"""Disaster-recovery trading transfer message"""
_fields_ = [
('OrigDRIdentityID', c_int),  # ///Original trading center ID
('DestDRIdentityID', c_int),  # Destination trading center ID
('OrigBrokerID', c_char * 11),  # Original application unit ID
('DestBrokerID', c_char * 11)  # Destination application unit ID
]
class FensUserInfoField(BaseField):
"""Fens user info"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('UserID', c_char * 16),  # User ID
('LoginMode', c_char * 1)  # Login mode
]
class CurrTransferIdentityField(BaseField):
"""Trading center currently serving bank-futures transfers"""
_fields_ = [
('IdentityID', c_int)  # ///Trading center ID
]
class LoginForbiddenUserField(BaseField):
"""Login-forbidden user"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('UserID', c_char * 16),  # User ID
('IPAddress', c_char * 16)  # IP address
]
class QryLoginForbiddenUserField(BaseField):
"""Query login-forbidden users"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('UserID', c_char * 16)  # User ID
]
class MulticastGroupInfoField(BaseField):
"""UDP multicast group info"""
_fields_ = [
('GroupIP', c_char * 16),  # ///Multicast group IP address
('GroupPort', c_int),  # Multicast group port
('SourceIP', c_char * 16)  # Source address
]
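# A hypothetical sketch of acting on a MulticastGroupInfoField with the
# standard library: join the UDP group it describes. The field itself only
# transports addresses; treating SourceIP as the local interface address
# passed to IP_ADD_MEMBERSHIP is an assumption for illustration.
import socket
import struct

def _join_multicast_group(group_ip, group_port, iface_ip):
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(("", group_port))
    # IP_ADD_MEMBERSHIP takes the packed group address followed by the
    # packed local interface address.
    mreq = struct.pack("4s4s", socket.inet_aton(group_ip), socket.inet_aton(iface_ip))
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
    return sock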
class TradingAccountReserveField(BaseField):
"""Basic reserve of a trading account"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Broker ID
('AccountID', c_char * 13),  # Investor account ID
('Reserve', c_double),  # Basic reserve
('CurrencyID', c_char * 4)  # Currency ID
]
class QryLoginForbiddenIPField(BaseField):
"""Query login-forbidden IPs"""
_fields_ = [
('IPAddress', c_char * 16)  # ///IP address
]
class QryIPListField(BaseField):
"""Query IP list"""
_fields_ = [
('IPAddress', c_char * 16)  # ///IP address
]
class QryUserRightsAssignField(BaseField):
"""Query user order-placement rights assignment"""
_fields_ = [
('BrokerID', c_char * 11),  # ///Application unit ID
('UserID', c_char * 16)  # User ID
]
class ReserveOpenAccountConfirmField(BaseField):
"""Reserved bank-futures account-opening confirmation request"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 161),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('Gender', c_char * 1),  # Gender
('CountryCode', c_char * 21),  # Country code
('CustType', c_char * 1),  # Customer type
('Address', c_char * 101),  # Address
('ZipCode', c_char * 7),  # Zip code
('Telephone', c_char * 41),  # Telephone
('MobilePhone', c_char * 21),  # Mobile phone
('Fax', c_char * 41),  # Fax
('EMail', c_char * 41),  # Email
('MoneyAccountStatus', c_char * 1),  # Money account status
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('InstallID', c_int),  # Install ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('TID', c_int),  # Transaction ID
('AccountID', c_char * 13),  # Investor account ID
('Password', c_char * 41),  # Futures password
('BankReserveOpenSeq', c_char * 13),  # Bank serial number of the reserved account opening
('BookDate', c_char * 9),  # Reserved opening date
('BookPsw', c_char * 41),  # Reserved opening verification password
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81)  # Error message
]
class ReserveOpenAccountField(BaseField):
"""Reserved bank-futures account opening"""
_fields_ = [
('TradeCode', c_char * 7),  # ///Business function code
('BankID', c_char * 4),  # Bank ID
('BankBranchID', c_char * 5),  # Bank branch ID
('BrokerID', c_char * 11),  # Broker ID
('BrokerBranchID', c_char * 31),  # Broker branch ID
('TradeDate', c_char * 9),  # Trade date
('TradeTime', c_char * 9),  # Trade time
('BankSerial', c_char * 13),  # Bank serial number
('TradingDay', c_char * 9),  # Trading system date
('PlateSerial', c_int),  # Bank-futures platform serial number
('LastFragment', c_char * 1),  # Last fragment flag
('SessionID', c_int),  # Session ID
('CustomerName', c_char * 161),  # Customer name
('IdCardType', c_char * 1),  # ID document type
('IdentifiedCardNo', c_char * 51),  # ID document number
('Gender', c_char * 1),  # Gender
('CountryCode', c_char * 21),  # Country code
('CustType', c_char * 1),  # Customer type
('Address', c_char * 101),  # Address
('ZipCode', c_char * 7),  # Zip code
('Telephone', c_char * 41),  # Telephone
('MobilePhone', c_char * 21),  # Mobile phone
('Fax', c_char * 41),  # Fax
('EMail', c_char * 41),  # Email
('MoneyAccountStatus', c_char * 1),  # Money account status
('BankAccount', c_char * 41),  # Bank account
('BankPassWord', c_char * 41),  # Bank password
('InstallID', c_int),  # Install ID
('VerifyCertNoFlag', c_char * 1),  # Verify ID number flag
('CurrencyID', c_char * 4),  # Currency ID
('Digest', c_char * 36),  # Digest
('BankAccType', c_char * 1),  # Bank account type
('BrokerIDByBank', c_char * 33),  # Broker code assigned by the bank
('TID', c_int),  # Transaction ID
('ReserveOpenAccStas', c_char * 1),  # Reserved opening status
('ErrorID', c_int),  # Error ID
('ErrorMsg', c_char * 81)  # Error message
]
class AccountPropertyField(BaseField):
    """Bank account properties"""
    _fields_ = [
        ('BrokerID', c_char * 11),          # broker ID
        ('AccountID', c_char * 13),         # investor account ID
        ('BankID', c_char * 4),             # bank unified identifier type
        ('BankAccount', c_char * 41),       # bank account
        ('OpenName', c_char * 101),         # account holder name of the bank account
        ('OpenBank', c_char * 101),         # opening bank of the bank account
        ('IsActive', c_int),                # whether the account is active
        ('AccountSourceType', c_char * 1),  # account source
        ('OpenDate', c_char * 9),           # account opening date
        ('CancelDate', c_char * 9),         # account cancellation date
        ('OperatorID', c_char * 65),        # operator ID
        ('OperateDate', c_char * 9),        # entry date
        ('OperateTime', c_char * 9),        # entry time
        ('CurrencyID', c_char * 4)          # currency ID
    ]
class QryCurrDRIdentityField(BaseField):
    """Query the current trading center"""
    _fields_ = [
        ('DRIdentityID', c_int)  # trading center ID
    ]
class CurrDRIdentityField(BaseField):
    """Current trading center"""
    _fields_ = [
        ('DRIdentityID', c_int)  # trading center ID
    ]
class QrySecAgentCheckModeField(BaseField):
    """Query the secondary agent fund-check mode"""
    _fields_ = [
        ('BrokerID', c_char * 11),   # broker ID
        ('InvestorID', c_char * 13)  # investor ID
    ]
class QrySecAgentTradeInfoField(BaseField):
    """Query secondary agent info"""
    _fields_ = [
        ('BrokerID', c_char * 11),         # broker ID
        ('BrokerSecAgentID', c_char * 13)  # overseas intermediary fund account
    ]
class UserSystemInfoField(BaseField):
    """User system info"""
    _fields_ = [
        ('BrokerID', c_char * 11),           # broker ID
        ('UserID', c_char * 16),             # user ID
        ('ClientSystemInfoLen', c_int),      # client-side system internal info length
        ('ClientSystemInfo', c_char * 273),  # client-side system internal info
        ('ClientPublicIP', c_char * 16),     # client public IP
        ('ClientIPPort', c_int),             # client IP port
        ('ClientLoginTime', c_char * 9),     # login success time
        ('ClientAppID', c_char * 33)         # app ID
    ]
class ReqUserAuthMethodField(BaseField):
    """User request for the available secure login methods"""
    _fields_ = [
        ('TradingDay', c_char * 9),  # trading day
        ('BrokerID', c_char * 11),   # broker ID
        ('UserID', c_char * 16)      # user ID
    ]
class RspUserAuthMethodField(BaseField):
    """Response to the secure login methods request"""
    _fields_ = [
        ('UsableAuthMethod', c_int)  # currently usable authentication modes
    ]
class ReqGenUserCaptchaField(BaseField):
    """User request to generate a picture captcha"""
    _fields_ = [
        ('TradingDay', c_char * 9),  # trading day
        ('BrokerID', c_char * 11),   # broker ID
        ('UserID', c_char * 16)      # user ID
    ]
class RspGenUserCaptchaField(BaseField):
    """Generated picture captcha info"""
    _fields_ = [
        ('BrokerID', c_char * 11),      # broker ID
        ('UserID', c_char * 16),        # user ID
        ('CaptchaInfoLen', c_int),      # picture info length
        ('CaptchaInfo', c_char * 2561)  # picture info
    ]
class ReqGenUserTextField(BaseField):
    """User request to generate an SMS verification code"""
    _fields_ = [
        ('TradingDay', c_char * 9),  # trading day
        ('BrokerID', c_char * 11),   # broker ID
        ('UserID', c_char * 16)      # user ID
    ]
class RspGenUserTextField(BaseField):
    """Response to SMS verification code generation"""
    _fields_ = [
        ('UserTextSeq', c_int)  # SMS verification code sequence number
    ]
class ReqUserLoginWithCaptchaField(BaseField):
    """User login request with a picture captcha"""
    _fields_ = [
        ('TradingDay', c_char * 9),             # trading day
        ('BrokerID', c_char * 11),              # broker ID
        ('UserID', c_char * 16),                # user ID
        ('Password', c_char * 41),              # password
        ('UserProductInfo', c_char * 11),       # user-side product info
        ('InterfaceProductInfo', c_char * 11),  # interface-side product info
        ('ProtocolInfo', c_char * 11),          # protocol info
        ('MacAddress', c_char * 21),            # MAC address
        ('ClientIPAddress', c_char * 16),       # client IP address
        ('LoginRemark', c_char * 36),           # login remark
        ('Captcha', c_char * 41),               # text content of the picture captcha
        ('ClientIPPort', c_int)                 # client IP port
    ]
class ReqUserLoginWithTextField(BaseField):
    """User login request with an SMS verification code"""
    _fields_ = [
        ('TradingDay', c_char * 9),             # trading day
        ('BrokerID', c_char * 11),              # broker ID
        ('UserID', c_char * 16),                # user ID
        ('Password', c_char * 41),              # password
        ('UserProductInfo', c_char * 11),       # user-side product info
        ('InterfaceProductInfo', c_char * 11),  # interface-side product info
        ('ProtocolInfo', c_char * 11),          # protocol info
        ('MacAddress', c_char * 21),            # MAC address
        ('ClientIPAddress', c_char * 16),       # client IP address
        ('LoginRemark', c_char * 36),           # login remark
        ('Text', c_char * 41),                  # SMS verification code text
        ('ClientIPPort', c_int)                 # client IP port
    ]
class ReqUserLoginWithOTPField(BaseField):
    """User login request with a one-time password"""
    _fields_ = [
        ('TradingDay', c_char * 9),             # trading day
        ('BrokerID', c_char * 11),              # broker ID
        ('UserID', c_char * 16),                # user ID
        ('Password', c_char * 41),              # password
        ('UserProductInfo', c_char * 11),       # user-side product info
        ('InterfaceProductInfo', c_char * 11),  # interface-side product info
        ('ProtocolInfo', c_char * 11),          # protocol info
        ('MacAddress', c_char * 21),            # MAC address
        ('ClientIPAddress', c_char * 16),       # client IP address
        ('LoginRemark', c_char * 36),           # login remark
        ('OTPPassword', c_char * 41),           # OTP password
        ('ClientIPPort', c_int)                 # client IP port
    ]
class ReqApiHandshakeField(BaseField):
    """API handshake request"""
    _fields_ = [
        ('CryptoKeyVersion', c_char * 31)  # version of the crypto key used between the API and the front
    ]
class RspApiHandshakeField(BaseField):
    """Handshake response from the front to the API"""
    _fields_ = [
        ('FrontHandshakeDataLen', c_int),      # handshake response data length
        ('FrontHandshakeData', c_char * 301),  # handshake response data
        ('IsApiAuthEnabled', c_int)            # whether API authentication is enabled
    ]
class ReqVerifyApiKeyField(BaseField):
    """Request from the API to the front to verify the key"""
    _fields_ = [
        ('ApiHandshakeDataLen', c_int),     # handshake data length
        ('ApiHandshakeData', c_char * 301)  # handshake data
    ]
class DepartmentUserField(BaseField):
    """Operator organizational relationship"""
    _fields_ = [
        ('BrokerID', c_char * 11),      # broker ID
        ('UserID', c_char * 16),        # user ID
        ('InvestorRange', c_char * 1),  # investor range
        ('InvestorID', c_char * 13)     # investor ID
    ]
class QueryFreqField(BaseField):
    """Query frequency: number of queries per second"""
    _fields_ = [
        ('QueryFreq', c_int)  # query frequency
    ]
    def __init__(self, QueryFreq=0):
        super(QueryFreqField, self).__init__()
        self.QueryFreq = int(QueryFreq)
    def from_tuple(self, i_tuple):
        self.QueryFreq = int(i_tuple[1])
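# Illustrative usage sketch (added for documentation; not part of the original
# module). It assumes BaseField is the ctypes.Structure subclass defined
# earlier in this file.
if __name__ == '__main__':
    freq = QueryFreqField(QueryFreq=5)  # allow at most 5 queries per second
    print(freq.QueryFreq)               # 5
    freq.from_tuple(('ignored', 10))    # from_tuple only reads index 1
    print(freq.QueryFreq)               # 10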
|
AlgoPlus
|
/CTP/ApiStruct.py
|
ApiStruct.py
|
# AlgoPlus quantitative investment open-source framework example
# WeChat official account: AlgoPlus
# Project website: http://www.algo.plus
import time
from AlgoPlus.CTP.TraderApiBase import TraderApiBase
from AlgoPlus.CTP.FutureAccount import FutureAccount
class ReqAuthenticate(TraderApiBase):
def __init__(self, broker_id, td_server, investor_id, password, app_id, auth_code, md_queue=None,
page_dir='', private_resume_type=2, public_resume_type=2):
pass
    def init_extra(self):
        """
        Initialize strategy parameters.
        :return:
        """
        # Expected parameter dict, e.g.:
        # {
        #     'ExchangeID': b'',        # exchange ID
        #     'InstrumentID': b'',      # instrument ID
        #     'UpperLimitPrice': 0.0,   # limit-up price
        #     'LowerLimitPrice': 0.0,   # limit-down price
        #     'Volume': 1,              # order volume
        # }
        self.parameter_dict = self.md_queue.get(block=False)
# ############################################################################# #
def OnRtnOrder(self, pOrder):
# self.write_log('OnRtnOrder', pOrder)
pass
# ############################################################################# #
def OnRtnTrade(self, pTrade):
# self.write_log('OnRtnTrade', pTrade)
pass
    def OnRspQryOrder(self, pOrder, pRspInfo, nRequestID, bIsLast):
        if bIsLast:
            self.write_log('OnRspQryOrder', "Query finished; result omitted to keep the output short.")
    def OnRspQryTrade(self, pTrade, pRspInfo, nRequestID, bIsLast):
        if bIsLast:
            self.write_log('OnRspQryTrade', "Query finished; result omitted to keep the output short.")
    def OnRspQryInvestorPosition(self, pInvestorPosition, pRspInfo, nRequestID, bIsLast):
        if bIsLast:
            self.write_log('OnRspQryInvestorPosition', "Query finished; result omitted to keep the output short.")
    def OnRspQryTradingAccount(self, pTradingAccount, pRspInfo, nRequestID, bIsLast):
        if bIsLast:
            self.write_log('OnRspQryTradingAccount', "Query finished; result omitted to keep the output short.")
    def Join(self):
        while True:
            if self.status >= 0 and isinstance(self.parameter_dict, dict):
                # ############################################################################# #
                # Five rounds of buy-open followed by sell-close
                ikk = 0
                while ikk < 5:
                    ikk += 1
                    # buy-open at the limit-up price
                    self.buy_open(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['UpperLimitPrice'], self.parameter_dict['Volume'])
                    self.write_log(f"=>{ikk}=>sent buy-open request at the limit-up price!")
                    time.sleep(3)
                    # sell-close at the limit-down price
                    self.sell_close(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['LowerLimitPrice'], self.parameter_dict['Volume'], True)
                    self.write_log(f"=>{ikk}=>sent sell-close request at the limit-down price!")
                # ############################################################################# #
                # Five rounds of sell-open followed by buy-close
                ikk = 0
                while ikk < 5:
                    ikk += 1
                    # sell-open at the limit-down price
                    self.sell_open(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['LowerLimitPrice'], self.parameter_dict['Volume'])
                    self.write_log(f"=>{ikk}=>sent sell-open request at the limit-down price!")
                    time.sleep(3)
                    # buy-close at the limit-up price
                    self.buy_close(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['UpperLimitPrice'], self.parameter_dict['Volume'], True)
                    self.write_log(f"=>{ikk}=>sent buy-close request at the limit-up price!")
                # ############################################################################# #
                # Buy-open, then cancel (the order rests at the limit-down price, so it should not fill)
                self.buy_open(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['LowerLimitPrice'], self.parameter_dict['Volume'])
                self.write_log("=>sent buy-open request at the limit-down price!")
                time.sleep(3)
                # cancel
                self.req_order_action(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.order_ref)
                self.write_log("=>sent order-cancel request!")
                # ############################################################################# #
                # Sell-open, then cancel (the order rests at the limit-up price, so it should not fill)
                self.sell_open(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.parameter_dict['UpperLimitPrice'], self.parameter_dict['Volume'])
                self.write_log("=>sent sell-open request at the limit-up price!")
                time.sleep(3)
                # cancel
                self.req_order_action(self.parameter_dict['ExchangeID'], self.parameter_dict['InstrumentID'], self.order_ref)
                self.write_log("=>sent order-cancel request!")
                # ############################################################################# #
                # query orders
                self.req_qry_order()
                self.write_log("=>sent order query request!")
                time.sleep(3)
                # ############################################################################# #
                # query trades
                self.req_qry_trade()
                self.write_log("=>sent trade query request!")
                time.sleep(3)
                # ############################################################################# #
                # query positions
                self.req_qry_investor_position()
                self.write_log("=>sent position query request!")
                time.sleep(3)
                # ############################################################################# #
                # query account funds
                self.req_qry_trading_account()
                self.write_log("=>sent account-funds query request!")
                time.sleep(3)
                # ############################################################################# #
                print("The see-through supervision authentication simulation is complete! You can now contact your futures broker!")
                break
            time.sleep(1)
def run_req_authenticate(account, md_queue):
if isinstance(account, FutureAccount):
trader_engine = ReqAuthenticate(
account.broker_id,
account.server_dict['TDServer'],
account.investor_id,
account.password,
account.app_id,
account.auth_code,
md_queue,
account.td_page_dir
)
trader_engine.Join()
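# Illustrative usage sketch (added for documentation; not part of the original
# module). The instrument and price values are placeholder assumptions; fill
# in your own account details before running.
if __name__ == '__main__':
    from multiprocessing import Queue

    md_queue = Queue()
    md_queue.put({
        'ExchangeID': b'SHFE',       # exchange ID (placeholder)
        'InstrumentID': b'cu2401',   # instrument ID (placeholder)
        'UpperLimitPrice': 70000.0,  # limit-up price (placeholder)
        'LowerLimitPrice': 60000.0,  # limit-down price (placeholder)
        'Volume': 1,                 # order volume
    })
    # account = FutureAccount(...)   # construct with your broker's details
    # run_req_authenticate(account, md_queue)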
|
AlgoPlus
|
/CTP/ReqAuthenticate.py
|
ReqAuthenticate.py
|
# AlgoSolver
[](https://www.apache.org/licenses/LICENSE-2.0)
[](https://github.com/Nickbohm555/AlgoSolver/issues)
[](https://github.com/Nickbohm555/AlgoSolver/actions/workflows/build.yml)
[](https://app.codecov.io/gh/Nickbohm555/AlgoSolver/tree/main)
[](https://pypi.org/project/AlgoSolver/)
[](https://algosolver.readthedocs.io/en/latest/)
## Overview
My goal is to develop a Python library that helps programmers solve a variety of algorithm-style questions. For example, with dynamic programming problems, the main things we need to know are whether we are using a 1D or 2D array, what the base case is, and what the recurrence relation is. With this in mind, we can create multiple templates for different types of questions that abstract away the implementation work. I wanted the library to have a special focus on graph algorithms. This allows programmers to spend more of their time thinking about high-level design questions.
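For instance, a 1D dynamic-programming template might look like the sketch below (illustrative only; `dp_1d_template` is not a function the library ships):
```python
def dp_1d_template(n, base_cases, recurrence):
    """Fill a 1D DP table from base cases and a recurrence relation."""
    table = list(base_cases) + [0] * (n + 1 - len(base_cases))
    for i in range(len(base_cases), n + 1):
        table[i] = recurrence(table, i)
    return table[n]

# Example: Fibonacci, where dp[i] = dp[i-1] + dp[i-2]
print(dp_1d_template(10, [0, 1], lambda dp, i: dp[i - 1] + dp[i - 2]))  # 55
```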
## Installation
To install, run the following:
```
pip install AlgoSolver
```
## Usage
### Searching
The following code will find the index of a given number 'num' in a sorted array 'arr'.
`arr`: A sorted array to search through.
`num`: The number you are searching for.
```python
from algo_solver import searching as s
index = s.binary_search(arr=[1, 2, 3, 5], num=3)
print(index)
```
### Graphs
The following code returns the set of all nodes reachable via BFS. It is given a graph 'graph' and a starting point 'start'.
`graph`: An adjacency mapping whose values are sets, e.g. `{x: {y}, y: {z}}`.
`start`: The node you are starting at.
```python
from algo_solver import graphs as g
visited = g.bfs(graph={2: {3}, 3: set()}, start=2)
print(visited)
```
|
AlgoSolver
|
/AlgoSolver-0.1.4.tar.gz/AlgoSolver-0.1.4/README.md
|
README.md
|
# Contributing
To contribute to [AlgoSolver](https://github.com/nickbohm555/AlgoSolver/), please adhere to the following guidelines:
## Prerequisites
Make sure `python` is installed on your computer.
## Cloning the Repo
First, `fork` the main branch of the repo. For more information on forking, please read this [tutorial](https://docs.github.com/en/get-started/quickstart/fork-a-repo).
Then, in your local environment, run `git clone https://github.com/<your_username>/AlgoSolver.git`.
## Installing Dependencies
Before you begin working on your contributions, you need to make sure you have all of the library's dependencies installed. To do that, simply run `make develop`.
## Before Opening a Pull Request (PR)
Before submitting your contributions through a [PR](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests), make sure to do the following:
- Write tests for any new features you build
- `make lint` to run static analysis
- `make format` to run autoformatting
- `make test` to make sure all tests pass
|
AlgoSolver
|
/AlgoSolver-0.1.4.tar.gz/AlgoSolver-0.1.4/CONTRIBUTING.md
|
CONTRIBUTING.md
|
# Quick Guide
AlgoSolver allows developers to implement useful algorithms as a black box.
[](https://www.apache.org/licenses/LICENSE-2.0)
[](https://github.com/Nickbohm555/AlgoSolver/issues)
[](https://github.com/Nickbohm555/AlgoSolver/actions/workflows/build.yml)
[](https://app.codecov.io/gh/Nickbohm555/AlgoSolver/tree/main)
[](https://pypi.org/project/AlgoSolver/)
## Overview
My goal is to develop a Python library that helps programmers solve a variety of algorithm-style questions. For example, with dynamic programming problems, the main things we need to know are whether we are using a 1D or 2D array, what the base case is, and what the recurrence relation is. With this in mind, we can create multiple templates for different types of questions that abstract away the implementation work. I wanted the library to have a special focus on graph algorithms. This allows programmers to spend more of their time thinking about high-level design questions.
## Installation
To install, run the following:
```
pip install AlgoSolver
```
## Usage
### Sorting
The following code sorts an array using merge sort and returns the sorted array.
`arr`: The array to sort.
```python
from algo_solver import sorting as s
arr = s.merge_sort(arr = [1,5,3,2])
print(arr)
```
### Searching
The following code will find the index of a given number 'num' in a sorted array 'arr'.
`arr`: A sorted array to search through.
`num`: The number you are searching for.
```python
from algo_solver import searching as s
index = s.binary_search(arr=[1, 2, 3, 5], num=3)
print(index)
```
### Graphs
The following code returns the set of all nodes reachable via BFS. It is given a graph 'graph' and a starting point 'start'.
`graph`: An adjacency mapping whose values are sets, e.g. `{x: {y}, y: {z}}`.
`start`: The node of the graph you are searching from.
```python
from algo_solver import graphs as g
visited = g.bfs(graph={2: {3}, 3: set()}, start=2)
print(visited)
```
|
AlgoSolver
|
/AlgoSolver-0.1.4.tar.gz/AlgoSolver-0.1.4/docs/quickguide.md
|
quickguide.md
|
# Welcome to AlgoSolver's documentation!
This library is a black box for developers to use to solve algorithms style problems.
[](https://www.apache.org/licenses/LICENSE-2.0)
[](https://github.com/Nickbohm555/AlgoSolver/issues)
[](https://github.com/Nickbohm555/AlgoSolver/actions/workflows/build.yml)
[](https://app.codecov.io/gh/Nickbohm555/AlgoSolver/tree/main)
[](https://pypi.org/project/AlgoSolver/)
## Overview
My goal is to develop a Python library that helps programmers solve a variety of algorithm-style questions. For example, with dynamic programming problems, the main things we need to know are whether we are using a 1D or 2D array, what the base case is, and what the recurrence relation is. With this in mind, we can create multiple templates for different types of questions that abstract away the implementation work. I wanted the library to have a special focus on graph algorithms. This allows programmers to spend more of their time thinking about high-level design questions.
```{eval-rst}
.. toctree::
:maxdepth: 2
:caption: Contents:
quickguide.md
sorting.rst
searching.rst
graphs.rst
tutorial.rst
contributing.rst
```
|
AlgoSolver
|
/AlgoSolver-0.1.4.tar.gz/AlgoSolver-0.1.4/docs/index.md
|
index.md
|
# Contributing
To contribute to [AlgoSolver](https://github.com/nickbohm555/AlgoSolver/), please adhere to the following guidelines:
## Prerequisites
Make sure `python` is installed on your computer.
## Cloning the Repo
First, `fork` the main branch of the repo. For more information on forking, please read this [tutorial](https://docs.github.com/en/get-started/quickstart/fork-a-repo).
Then, in your local environment, run:
```
git clone https://github.com/<your_username>/AlgoSolver.git
```
## Installing Dependencies
Before you begin working on your contributions, you need to make sure you have all of the library's dependencies installed. To do that, simply run `make develop`.
## Before Opening a Pull Request (PR)
Before submitting your contributions through a [PR](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests), make sure to do the following:
- Write tests for any new features you build
- `make lint` to run static analysis
- `make format` to run autoformatting
- `make test` to make sure all tests pass
|
AlgoSolver
|
/AlgoSolver-0.1.4.tar.gz/AlgoSolver-0.1.4/docs/contributing.md
|
contributing.md
|
# Tutorial
Here is an example of using the [AlgoSolver](https://github.com/nickbohm555/AlgoSolver/) library in a competitive programming competition or a coding interview.
## Prerequisites
Before starting, make sure to run the following:
```
pip install AlgoSolver
```
Also at the top of the file, make sure to import the library.
```
from algo_solver import sorting as sort
from algo_solver import searching as search
from algo_solver import graphs as graph
```
## Example problem Number 1
It is quite common to traverse graphs in technical interviews or CP competitions. Here is an example: find the length of the shortest path from every node in the graph to node 'F'. Let's say the input is given below:
```
input_graph = {
'A': set(['B', 'C']),
'B': set(['A', 'D', 'E']),
'C': set(['A', 'F']),
'D': set(['B']),
'E': set(['B', 'F']),
'F': set(['C', 'E']),
}
find_shortest(input_graph, 'F'):
    return the distance of the shortest path from each node to 'F'.
```
## Solution
With AlgoSolver, save time on implementation.
```python
from algo_solver import graphs as graph
def find_shortest(input_graph, end):
    # for every node, compute the BFS distance to `end`
    for start in input_graph:
        distance = graph.find_distance_unweighted_graph(input_graph, start, end)
        print(start, distance)
```
## Example problem Number 2
It is quite common to perform binary search in technical interviews or CP competitions. Here is an example: find whether the number 22 exists in a sorted array. Linear search would take O(n) time, but binary search brings this down to O(log n).
```
input_array = [4, 6, 7, 10, 12, 15, 22, 99, 111, 256, 777]
does_exist(input_array, 22):
return whether or not 22 exists in the array.
```
## Solution
With AlgoSolver, save time on implementation.
```python
from algo_solver import searching as search
def does_exist(input_array, num):
    index = search.binary_search(input_array, num)
    if index == -1:
        print('does not exist')
    else:
        print('Exists - found in O(log n) time')
```
## Example problem Number 3
It is quite common to perform sorting in technical interviews or CP competitions. Here is an example: sort an array in roughly linear time, knowing that the maximum value is no greater than 50. Here we can apply bucket sort, with no need to memorize the implementation.
```
input_array = [33,22,11,21,34,32,19, 23, 39, 1, 4, 6]
```
## Solution
With AlgoSolver, save time on implementation.
```python
from algo_solver import sorting as sort
new_arr = sort.bucket_sort(input_array)
print(new_arr)
```
|
AlgoSolver
|
/AlgoSolver-0.1.4.tar.gz/AlgoSolver-0.1.4/docs/tutorial.md
|
tutorial.md
|
from collections import deque
def bfs(graph, start):
"""
Performs breadth first search on a graph with a given starting point.
Args:
graph (dict): The graph we are traversing.
start (int): The starting node to traverse from.
Returns:
set: The set of all nodes we visit.
"""
visited = set()
queue = deque([start])
while queue:
vertex = queue.popleft()
if vertex not in visited:
visited.add(vertex)
queue.extend(graph[vertex] - visited)
return visited
def dfs(graph, start):
"""
Performs depth first search on a graph with a given starting point.
Args:
graph (dict): The graph we are traversing.
start (int): The starting node to traverse from.
Returns:
set: The set of all nodes we visit.
"""
visited = set()
def dfs_helper(vertex):
visited.add(vertex)
for neighbor in graph[vertex]:
if neighbor not in visited:
dfs_helper(neighbor)
dfs_helper(start)
return visited
def find_distance_unweighted_graph(graph, start, end):
"""
Performs breadth first search on a graph with a given starting point to find the distance to end.
Args:
graph (dict): The graph we are traversing.
start (int): The starting node to traverse from.
end (int): The destination we are trying to reach.
Returns:
int: The distance from start to end of an unweighted graph.
"""
visited = set()
queue = deque([(start, 0)])
while queue:
vertex, distance = queue.popleft()
if vertex not in visited:
visited.add(vertex)
if vertex == end:
return distance
for neighbor in graph[vertex]:
if neighbor not in visited:
queue.append((neighbor, distance + 1))
return -1
import heapq
def dijkstra(graph, start):
"""
Computes the shortest path to every node in a graph using Dijkstra's Algorithm.
Args:
graph (dict): The graph we are traversing.
start (int): The starting node to find shortest paths from.
Returns:
dict: A dictionary containing the shortest distance to each node from the starting node.
"""
distance = {node: float('inf') for node in graph}
distance[start] = 0
queue = [(0, start)]
while queue:
current_distance, current_node = heapq.heappop(queue)
if current_distance > distance[current_node]:
continue
for neighbor, weight in graph[current_node].items():
new_distance = current_distance + weight
if new_distance < distance[neighbor]:
distance[neighbor] = new_distance
heapq.heappush(queue, (new_distance, neighbor))
return distance
def topological_sort(graph):
"""
Sorts a directed acyclic graph in topological order.
Args:
graph (dict): The directed acyclic graph to sort.
Returns:
list: The nodes of the graph in topological order.
"""
in_degrees = {node: 0 for node in graph}
for node in graph:
for neighbor in graph[node]:
in_degrees[neighbor] += 1
    queue = deque(node for node in in_degrees if in_degrees[node] == 0)
    top_order = []
    while queue:
        node = queue.popleft()
top_order.append(node)
for neighbor in graph[node]:
in_degrees[neighbor] -= 1
if in_degrees[neighbor] == 0:
queue.append(neighbor)
return top_order
def bellman_ford(graph, start):
"""
Finds the shortest path from a starting node to all other nodes
in a weighted graph with negative edge weights.
Args:
graph (dict): The weighted graph represented as a dictionary of dictionaries.
start (any): The starting node for the algorithm.
Returns:
dict: A dictionary of shortest path distances for each node in the graph.
"""
distances = {node: float('inf') for node in graph}
distances[start] = 0
for _ in range(len(graph) - 1):
for node in graph:
for neighbor in graph[node]:
new_distance = distances[node] + graph[node][neighbor]
if new_distance < distances[neighbor]:
distances[neighbor] = new_distance
# Check for negative weight cycles
for node in graph:
for neighbor in graph[node]:
if distances[node] + graph[node][neighbor] < distances[neighbor]:
raise ValueError("Negative weight cycle detected")
return distances
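# Illustrative usage sketch (added for documentation; not part of the original
# module). The example graphs are made up.
if __name__ == '__main__':
    unweighted = {'A': {'B', 'C'}, 'B': {'A', 'D'}, 'C': {'A'}, 'D': {'B'}}
    print(bfs(unweighted, 'A'))                                  # {'A', 'B', 'C', 'D'}
    print(find_distance_unweighted_graph(unweighted, 'A', 'D'))  # 2

    weighted = {'A': {'B': 1, 'C': 4}, 'B': {'C': 2}, 'C': {}}
    print(dijkstra(weighted, 'A'))      # {'A': 0, 'B': 1, 'C': 3}
    print(bellman_ford(weighted, 'A'))  # same result; also detects negative cycles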
|
AlgoSolver
|
/AlgoSolver-0.1.4.tar.gz/AlgoSolver-0.1.4/Algo_Solver/graphs.py
|
graphs.py
|
import random
def binary_search(arr, num):
"""
    Performs binary search on a sorted array given a specific integer to find.
    Args:
        arr (array): The sorted array we are searching.
        num (int): The integer we are looking for.
    Returns:
        int: The index of the specified number, or -1 if not found.
"""
low = 0
high = len(arr) - 1
while low <= high:
mid = (low + high) // 2
if arr[mid] < num:
low = mid + 1
elif arr[mid] > num:
high = mid - 1
else:
return mid
return -1
def selection_algo(arr, k):
"""
    Performs the quickselect selection algorithm on an array to find the kth smallest value.
    Args:
        arr (array): An unsorted array.
        k (int): The 0-based rank to select (k = 0 gives the smallest).
    Returns:
        int: The kth smallest value.
"""
if len(arr) == 1:
return arr[0]
pivot = random.choice(arr)
lows = [el for el in arr if el < pivot]
highs = [el for el in arr if el > pivot]
pivots = [el for el in arr if el == pivot]
if k < len(lows):
return selection_algo(lows, k)
elif k < len(lows) + len(pivots):
return pivots[0]
else:
return selection_algo(highs, k - len(lows) - len(pivots))
def linear_search(arr, num):
"""
Performs linear search on an array given a specific integer to find.
Args:
arr (array): The array we are searching.
num (int): The integer we are looking for.
Returns:
int: The index of the specified number.
"""
for i in range(len(arr)):
if arr[i] == num:
return i
return -1
def find_duplicates(arr):
"""
Finds duplicates in an array using hashing.
Args:
arr (list): The array we are searching for duplicates.
Returns:
list: A list of all the duplicate elements in the array.
"""
seen = set()
duplicates = set()
for elem in arr:
if elem in seen:
duplicates.add(elem)
else:
seen.add(elem)
return list(duplicates)
def is_palindrome(s):
"""
Checks if a given string is a palindrome or not.
Args:
s (str): The string to check.
Returns:
bool: True if the string is a palindrome, False otherwise.
"""
s = s.lower()
s = ''.join(c for c in s if c.isalnum())
return s == s[::-1]
def count_values(head):
"""
Counts the number of times each value appears in a linked list.
Args:
head (ListNode): The head node of the linked list.
Returns:
dict: A dictionary with keys as values in the linked list and values as the count of occurrences.
"""
counts = {}
curr = head
while curr:
if curr.val not in counts:
counts[curr.val] = 1
else:
counts[curr.val] += 1
curr = curr.next
return counts
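# Illustrative usage sketch (added for documentation; not part of the original
# module). _Node is a hypothetical minimal node type for count_values.
if __name__ == '__main__':
    print(binary_search([1, 2, 3, 5], 3))    # 2
    print(selection_algo([7, 1, 5, 3], 1))   # 3 (the second smallest)
    print(find_duplicates([1, 2, 2, 3, 3]))  # [2, 3] in some order
    print(is_palindrome('A man, a plan, a canal: Panama'))  # True

    class _Node:
        def __init__(self, val, next=None):
            self.val, self.next = val, next

    head = _Node(1, _Node(2, _Node(1)))
    print(count_values(head))                # {1: 2, 2: 1}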
|
AlgoSolver
|
/AlgoSolver-0.1.4.tar.gz/AlgoSolver-0.1.4/Algo_Solver/searching.py
|
searching.py
|
def bubble_sort(arr):
"""
This algorithm performs bubble sort on an array to sort it.
Args:
arr (array): The array we are given.
Returns:
array: The sorted version of the array.
"""
array_len = len(arr)
for i in range(array_len):
for j in range(0, array_len - i - 1):
if arr[j] > arr[j + 1]:
arr[j], arr[j + 1] = arr[j + 1], arr[j]
return arr
def insertion_sort(arr):
"""
Performs insertion sort on an array to sort it.
Args:
arr (array): The array we are given.
Returns:
array: The sorted version of the array.
"""
array_len = len(arr)
for i in range(1, array_len):
key = arr[i]
j = i - 1
while j >= 0 and key < arr[j]:
arr[j + 1] = arr[j]
j -= 1
arr[j + 1] = key
return arr
def merge_sort(arr):
"""
Performs merge sort on an array to sort it.
Args:
arr (array): The array we are given.
Returns:
array: The sorted version of the array.
"""
array_len = len(arr)
if array_len > 1:
mid = array_len // 2
left_half = arr[:mid]
right_half = arr[mid:]
merge_sort(left_half)
merge_sort(right_half)
i = j = k = 0
while i < len(left_half) and j < len(right_half):
if left_half[i] < right_half[j]:
arr[k] = left_half[i]
i += 1
else:
arr[k] = right_half[j]
j += 1
k += 1
while i < len(left_half):
arr[k] = left_half[i]
i += 1
k += 1
while j < len(right_half):
arr[k] = right_half[j]
j += 1
k += 1
return arr
def quick_sort(arr):
"""
Performs quick sort on an array to sort it.
Args:
arr (array): The array we are given.
Returns:
array: The sorted version of the array.
"""
array_len = len(arr)
if array_len <= 1:
return arr
pivot = arr[array_len // 2]
left = [x for x in arr if x < pivot]
middle = [x for x in arr if x == pivot]
right = [x for x in arr if x > pivot]
return quick_sort(left) + middle + quick_sort(right)
def bucket_sort(arr):
"""
    Performs bucket sort on an array of non-negative numbers to sort it.
Args:
arr (array): The array we are given.
Returns:
array: The sorted version of the array.
"""
max_val = max(arr)
size = max_val / len(arr)
buckets = [[] for _ in range(len(arr))]
for i in range(len(arr)):
j = int(arr[i] / size)
if j != len(arr):
buckets[j].append(arr[i])
else:
buckets[len(arr) - 1].append(arr[i])
for i in range(len(arr)):
insertion_sort(buckets[i])
return [item for bucket in buckets for item in bucket]
def radix_sort(arr):
"""
    Performs radix sort on an array of non-negative integers to sort it.
Args:
arr (array): The array we are given.
Returns:
array: The sorted version of the array.
"""
max_val = max(arr)
exp = 1
while max_val // exp > 0:
counting_sort(arr, exp)
exp *= 10
return arr
def counting_sort(arr, exp):
"""
    Performs counting sort on an array by the digit at the given exponent (helper for radix_sort).
    Args:
        arr (array): The array we are given.
        exp (int): The current digit position (1, 10, 100, ...).
    Returns:
        None: The array is reordered in place for this digit.
"""
array_len = len(arr)
output = [0] * array_len
count = [0] * 10
for i in range(array_len):
index = arr[i] // exp
count[index % 10] += 1
for i in range(1, 10):
count[i] += count[i - 1]
i = array_len - 1
while i >= 0:
index = arr[i] // exp
output[count[index % 10] - 1] = arr[i]
count[index % 10] -= 1
i -= 1
for i in range(array_len):
arr[i] = output[i]
def two_pointer_sort(arr1, arr2):
"""
    Merges two sorted arrays using a two-pointer algorithm.
Args:
arr1 (array): The first sorted array.
arr2 (array): The second sorted array.
Returns:
array: A sorted array that merges the two input arrays.
"""
sorted_arr = []
i, j = 0, 0
while i < len(arr1) and j < len(arr2):
if arr1[i] < arr2[j]:
sorted_arr.append(arr1[i])
i += 1
else:
sorted_arr.append(arr2[j])
j += 1
while i < len(arr1):
sorted_arr.append(arr1[i])
i += 1
while j < len(arr2):
sorted_arr.append(arr2[j])
j += 1
return sorted_arr
def heapify(arr, n, i):
"""
A helper function to create a max heap.
Args:
arr (array): The input array.
n (int): The size of the heap.
i (int): The index to start heapifying from.
"""
largest = i
left = 2 * i + 1
right = 2 * i + 2
if left < n and arr[i] < arr[left]:
largest = left
if right < n and arr[largest] < arr[right]:
largest = right
if largest != i:
arr[i], arr[largest] = arr[largest], arr[i]
heapify(arr, n, largest)
def heapsort(arr):
"""
Sorts an input array using heapsort.
Args:
arr (array): The input array to sort.
Returns:
array: The sorted array.
"""
n = len(arr)
for i in range(n // 2 - 1, -1, -1):
heapify(arr, n, i)
for i in range(n - 1, 0, -1):
arr[i], arr[0] = arr[0], arr[i]
heapify(arr, i, 0)
return arr
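# Illustrative usage sketch (added for documentation; not part of the original
# module). bucket_sort and radix_sort assume non-negative numbers.
if __name__ == '__main__':
    data = [33, 22, 11, 21, 34, 1]
    print(merge_sort(data[:]))                  # copies keep `data` intact
    print(quick_sort(data[:]))
    print(bucket_sort(data[:]))
    print(radix_sort(data[:]))
    print(heapsort(data[:]))
    print(two_pointer_sort([1, 3, 5], [2, 4]))  # [1, 2, 3, 4, 5]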
|
AlgoSolver
|
/AlgoSolver-0.1.4.tar.gz/AlgoSolver-0.1.4/Algo_Solver/sorting.py
|
sorting.py
|
import datetime as dt
import inspect
import logging
import warnings
from copy import deepcopy
from typing import Iterable, Optional, Tuple, Union
import builtins
import copy
from abc import ABC, ABCMeta, abstractmethod
from collections import namedtuple
from dataclasses import Field, InitVar, MISSING, dataclass, field, fields, replace
from enum import EnumMeta
from functools import update_wrapper
from typing import Iterable, Mapping, Optional, Union, Tuple
import numpy as np
from dataclasses_json import config, global_config
from dataclasses_json.core import _decode_generic, _is_supported_generic
from inflection import camelize, underscore
# from dataclasses_json import global_config
_logger = logging.getLogger(__name__)
__builtins = set(dir(builtins))
__getattribute__ = object.__getattribute__
__setattr__ = object.__setattr__
_rename_cache = {}
def exclude_none(o):
return o is None
def exclude_always(_o):
    return True
def is_iterable(o, t):
return isinstance(o, Iterable) and all(isinstance(it, t) for it in o)
def is_instance_or_iterable(o, t):
return isinstance(o, t) or is_iterable(o, t)
def _get_underscore(arg):
if arg not in _rename_cache:
_rename_cache[arg] = underscore(arg)
return _rename_cache[arg]
def handle_camel_case_args(cls):
init = cls.__init__
def wrapper(self, *args, **kwargs):
normalised_kwargs = {}
for arg, value in kwargs.items():
if not arg.isupper():
snake_case_arg = _get_underscore(arg)
if snake_case_arg != arg and snake_case_arg in kwargs:
raise ValueError('{} and {} both specified'.format(arg, snake_case_arg))
arg = snake_case_arg
arg = cls._field_mappings().get(arg, arg)
normalised_kwargs[arg] = value
return init(self, *args, **normalised_kwargs)
cls.__init__ = update_wrapper(wrapper=wrapper, wrapped=init)
return cls
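# Illustrative note (added for documentation): once the decorator is applied
# to a generated dataclass, camelCase keyword arguments are rewritten to
# snake_case before __init__ runs, e.g.
#
#     SomeInstrument(payCcy='USD')  ->  SomeInstrument(pay_ccy='USD')
#
# and passing both spellings raises
# ValueError('payCcy and pay_ccy both specified').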
field_metadata = config(exclude=exclude_none)
name_metadata = config(exclude=exclude_always)
class HashableDict(dict):
@staticmethod
def hashables(in_dict) -> Tuple:
hashables = []
for it in in_dict.items():
if isinstance(it[1], dict):
hashables.append((it[0], HashableDict.hashables(it[1])))
else:
hashables.append(it)
return tuple(hashables)
def __hash__(self):
return hash(HashableDict.hashables(self))
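# Illustrative usage sketch (added for documentation; not part of the original
# module): HashableDict instances can serve as dictionary keys.
if __name__ == '__main__':
    key = HashableDict({'ccy': {'base': 'USD'}})
    cache = {key: 'cached-result'}
    print(cache[HashableDict({'ccy': {'base': 'USD'}})])  # 'cached-result'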
class DictBase(HashableDict):
_PROPERTIES = set()
def __init__(self, *args, **kwargs):
if self._PROPERTIES:
invalid_arg = next((k for k in kwargs.keys() if k not in self._PROPERTIES), None)
if invalid_arg is not None:
raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{invalid_arg}'")
super().__init__(*args, **{camelize(k, uppercase_first_letter=False): v for k, v in kwargs.items()
if v is not None})
def __getitem__(self, item):
return super().__getitem__(camelize(item, uppercase_first_letter=False))
def __setitem__(self, key, value):
if value is not None:
return super().__setitem__(camelize(key, uppercase_first_letter=False), value)
def __getattr__(self, item):
if self._PROPERTIES:
if _get_underscore(item) in self._PROPERTIES:
return self.get(item)
elif item in self:
return self[item]
raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{item}'")
def __setattr__(self, key, value):
if key in dir(self):
return super().__setattr__(key, value)
elif self._PROPERTIES and _get_underscore(key) not in self._PROPERTIES:
raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{key}'")
self[key] = value
@classmethod
def properties(cls) -> set:
return cls._PROPERTIES
class Base(ABC):
"""The base class for all generated classes"""
__fields_by_name = None
__field_mappings = None
def __getattr__(self, item):
fields_by_name = __getattribute__(self, '_fields_by_name')()
if item.startswith('_') or item in fields_by_name:
return __getattribute__(self, item)
        # Handle getting via camelCase names (legacy behaviour) and field mappings from disallowed names
snake_case_item = _get_underscore(item)
field_mappings = __getattribute__(self, '_field_mappings')()
snake_case_item = field_mappings.get(snake_case_item, snake_case_item)
try:
return __getattribute__(self, snake_case_item)
except AttributeError:
return __getattribute__(self, item)
def __setattr__(self, key, value):
# Handle setting via camelCase names (legacy behaviour)
snake_case_key = _get_underscore(key)
snake_case_key = self._field_mappings().get(snake_case_key, snake_case_key)
fld = self._fields_by_name().get(snake_case_key)
if fld:
if not fld.init:
raise ValueError(f'{key} cannot be set')
key = snake_case_key
value = self.__coerce_value(fld.type, value)
__setattr__(self, key, value)
def __repr__(self):
if self.name is not None:
return f'{self.name} ({self.__class__.__name__})'
return super().__repr__()
@classmethod
def __coerce_value(cls, typ: type, value):
if isinstance(value, np.generic):
# Handle numpy types
return value.item()
        elif hasattr(value, 'tolist'):
            # tolist converts a numpy scalar or array to a native python type if not already native.
            return value.tolist()
elif typ in (DictBase, Optional[DictBase]) and isinstance(value, Base):
return value.to_dict()
if _is_supported_generic(typ):
return _decode_generic(typ, value, False)
else:
return value
@classmethod
def _fields_by_name(cls) -> Mapping[str, Field]:
if cls is Base:
return {}
if cls.__fields_by_name is None:
cls.__fields_by_name = {f.name: f for f in fields(cls)}
return cls.__fields_by_name
@classmethod
def _field_mappings(cls) -> Mapping[str, str]:
if cls is Base:
return {}
if cls.__field_mappings is None:
field_mappings = {}
for fld in fields(cls):
config_fn = fld.metadata.get('dataclasses_json', {}).get('letter_case')
if config_fn:
mapped_name = config_fn('field_name')
if mapped_name:
field_mappings[mapped_name] = fld.name
cls.__field_mappings = field_mappings
return cls.__field_mappings
def clone(self, **kwargs):
"""
Clone this object, overriding specified values
:param kwargs: property names and values, e.g. swap.clone(fixed_rate=0.01)
**Examples**
To change the market data location of the default context:
>>> from gs_quant.instrument import IRCap
>>> cap = IRCap('5y', 'GBP')
>>>
>>> new_cap = cap.clone(cap_rate=0.01)
"""
return replace(self, **kwargs)
@classmethod
def properties(cls) -> set:
"""The public property names of this class"""
return set(f[:-1] if f[-1] == '_' else f for f in cls._fields_by_name().keys())
@classmethod
def properties_init(cls) -> set:
"""The public property names of this class"""
return set(f[:-1] if f[-1] == '_' else f for f, v in cls._fields_by_name().items() if v.init)
def as_dict(self, as_camel_case: bool = False) -> dict:
"""Dictionary of the public, non-null properties and values"""
# to_dict() converts all the values to JSON type, does camel case and name mappings
# asdict() does not convert values or case of the keys or do name mappings
ret = {}
field_mappings = {v: k for k, v in self._field_mappings().items()}
        for key in self._fields_by_name().keys():
value = __getattribute__(self, key)
key = field_mappings.get(key, key)
if value is not None:
if as_camel_case:
key = camelize(key, uppercase_first_letter=False)
ret[key] = value
return ret
@classmethod
def default_instance(cls):
"""
Construct a default instance of this type
"""
required = {f.name: None if f.default == MISSING else f.default for f in fields(cls) if f.init}
return cls(**required)
def from_instance(self, instance):
"""
Copy the values from an existing instance of the same type to our self
:param instance: from which to copy:
:return:
"""
if not isinstance(instance, type(self)):
raise ValueError('Can only use from_instance with an object of the same type')
for fld in fields(self.__class__):
if fld.init:
__setattr__(self, fld.name, __getattribute__(instance, fld.name))
class InstrumentBase(Base, ABC):
quantity_: InitVar[float] = field(default=1, init=False)
@property
@abstractmethod
def provider(self):
...
@property
def instrument_quantity(self) -> float:
return self.quantity_
"""
@property
def resolution_key(self) -> Optional[RiskKey]:
try:
return self.__resolution_key
except AttributeError:
return None
"""
@property
def unresolved(self):
try:
return self.__unresolved
except AttributeError:
return None
@property
def metadata(self):
try:
return self.__metadata
except AttributeError:
return None
@metadata.setter
def metadata(self, value):
self.__metadata = value
def from_instance(self, instance):
self.__resolution_key = None
super().from_instance(instance)
self.__unresolved = instance.__unresolved
self.__resolution_key = instance.__resolution_key
def resolved(self, values: dict, resolution_key: RiskKey):
all_values = self.as_dict(True)
all_values.update(values)
new_instrument = self.from_dict(all_values)
new_instrument.name = self.name
new_instrument.__unresolved = copy.copy(self)
new_instrument.__resolution_key = resolution_key
return new_instrument
def clone(self, **kwargs):
new_instrument = super().clone(**kwargs)
new_instrument.__unresolved = self.unresolved
new_instrument.metadata = self.metadata
new_instrument.__resolution_key = self.resolution_key
return new_instrument
class Instrument(object):
PROVIDER = 'local'
__instrument_mappings = {}
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/domain/instrument/core.py
|
core.py
|
import importlib
import importlib.util
from typing import Any, Optional
import fsspec
import msgspec
def resolve_path(path: str):
module, cls = path.rsplit(":", maxsplit=1)
mod = importlib.import_module(module)
cls = getattr(mod, cls)
return cls
class AlgoVisionConfig(msgspec.Struct, kw_only=True, frozen=True):
"""
Base class for AlgoVision configurations.
"""
@classmethod
def fully_qualified_name(cls) -> str:
"""
        Return the fully qualified name for the `AlgoVisionConfig` class.
Returns
-------
str
References
----------
https://www.python.org/dev/peps/pep-3155/
"""
return cls.__module__ + ":" + cls.__qualname__
def dict(self) -> dict[str, Any]:
"""
Return a dictionary representation of the configuration.
Returns
-------
dict[str, Any]
"""
return {k: getattr(self, k) for k in self.__struct_fields__}
def json(self) -> bytes:
"""
Return serialized JSON encoded bytes.
Returns
-------
bytes
"""
return msgspec.json.encode(self)
@classmethod
def parse(cls, raw: bytes) -> Any:
"""
Return a decoded object of the given `cls`.
Parameters
----------
cls : type
The type to decode to.
raw : bytes
The raw bytes to decode.
Returns
-------
Any
"""
return msgspec.json.decode(raw, type=cls)
def validate(self) -> bool:
"""
Return whether the configuration can be represented as valid JSON.
Returns
-------
bool
"""
return bool(msgspec.json.decode(self.json(), type=self.__class__))
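# Illustrative usage sketch (added for documentation; not part of the original
# module). _DemoConfig is a hypothetical subclass.
class _DemoConfig(AlgoVisionConfig, frozen=True):
    name: str
    retries: int = 3

if __name__ == '__main__':
    cfg = _DemoConfig(name='example')
    raw = cfg.json()                                # b'{"name":"example","retries":3}'
    print(_DemoConfig.parse(raw))                   # round-trips through msgspec
    print(cfg.dict())                               # {'name': 'example', 'retries': 3}
    print(resolve_path('collections:OrderedDict'))  # the module:class helper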
class InstrumentProviderConfig(AlgoVisionConfig, frozen=True):
"""
Configuration for an instrument provider.
Parameters
----------
load_all : bool, default False
If all venue instruments should be loaded on start.
load_ids : FrozenSet[str], optional
The list of instrument IDs to be loaded on start (if `load_all_instruments` is False).
filters : frozendict, optional
The venue specific instrument loading filters to apply.
filter_callable: str, optional
A fully qualified path to a callable that takes a single argument, `instrument` and returns a bool, indicating
whether the instrument should be loaded
log_warnings : bool, default True
If parser warnings should be logged.
"""
def __eq__(self, other):
return (
self.load_all == other.load_all
and self.load_ids == other.load_ids
and self.filters == other.filters
)
def __hash__(self):
return hash((self.load_all, self.load_ids, self.filters))
load_all: bool = False
load_ids: Optional[frozenset[str]] = None
filters: Optional[dict[str, Any]] = None
filter_callable: Optional[str] = None
log_warnings: bool = True
class DataEngineConfig(AlgoVisionConfig, frozen=True):
"""
Configuration for ``DataEngine`` instances.
Parameters
----------
time_bars_build_with_no_updates : bool, default True
If time bar aggregators will build and emit bars with no new market updates.
time_bars_timestamp_on_close : bool, default True
If time bar aggregators will timestamp `ts_event` on bar close.
If False then will timestamp on bar open.
validate_data_sequence : bool, default False
If data objects timestamp sequencing will be validated and handled.
debug : bool, default False
If debug mode is active (will provide extra debug logging).
"""
time_bars_build_with_no_updates: bool = True
time_bars_timestamp_on_close: bool = True
validate_data_sequence: bool = False
debug: bool = False
class ExecEngineConfig(AlgoVisionConfig, frozen=True):
"""
Configuration for ``ExecutionEngine`` instances.
Parameters
----------
load_cache : bool, default True
If the cache should be loaded on initialization.
allow_cash_positions : bool, default True
If unleveraged spot/cash assets should generate positions.
filter_unclaimed_external_orders : bool, default False
If unclaimed order events with an EXTERNAL strategy ID should be filtered/dropped.
debug : bool, default False
If debug mode is active (will provide extra debug logging).
"""
load_cache: bool = True
allow_cash_positions: bool = True
filter_unclaimed_external_orders: bool = False
debug: bool = False
class StreamingConfig(AlgoVisionConfig, frozen=True):
"""
Configuration for streaming live or backtest runs to the catalog in feather format.
Parameters
----------
catalog_path : str
The path to the data catalog.
fs_protocol : str, optional
The `fsspec` filesystem protocol for the catalog.
fs_storage_options : dict, optional
The `fsspec` storage options.
flush_interval_ms : int, optional
The flush interval (milliseconds) for writing chunks.
replace_existing: bool, default False
If any existing feather files should be replaced.
"""
catalog_path: str
fs_protocol: Optional[str] = None
fs_storage_options: Optional[dict] = None
flush_interval_ms: Optional[int] = None
replace_existing: bool = False
include_types: Optional[list[str]] = None
@property
def fs(self):
return fsspec.filesystem(protocol=self.fs_protocol, **(self.fs_storage_options or {}))
    def as_catalog(self) -> "ParquetDataCatalog":
        # NOTE: ParquetDataCatalog is not imported in this module; it must be
        # made available by the caller before as_catalog() is used.
return ParquetDataCatalog(
path=self.catalog_path,
fs_protocol=self.fs_protocol,
fs_storage_options=self.fs_storage_options,
)
class DataCatalogConfig(AlgoVisionConfig, frozen=True):
"""
Configuration for a data catalog.
Parameters
----------
path : str
The path to the data catalog.
fs_protocol : str, optional
The fsspec file system protocol for the data catalog.
fs_storage_options : dict, optional
The fsspec storage options for the data catalog.
use_rust : bool, default False
If queries will be for Rust schema versions (when implemented).
"""
path: str
fs_protocol: Optional[str] = None
fs_storage_options: Optional[dict] = None
use_rust: bool = False
class ActorConfig(AlgoVisionConfig, kw_only=True, frozen=True):
"""
The base model for all actor configurations.
Parameters
----------
component_id : str, optional
The component ID. If ``None`` then the identifier will be taken from
`type(self).__name__`.
"""
component_id: Optional[str] = None
class ImportableActorConfig(AlgoVisionConfig, frozen=True):
"""
Configuration for an actor instance.
Parameters
----------
actor_path : str
The fully qualified name of the Actor class.
config_path : str
The fully qualified name of the Actor Config class.
config : dict
The actor configuration.
"""
actor_path: str
config_path: str
config: dict
class ActorFactory:
"""
Provides actor creation from importable configurations.
"""
@staticmethod
def create(config: ImportableActorConfig):
"""
Create an actor from the given configuration.
Parameters
----------
config : ImportableActorConfig
The configuration for the building step.
Returns
-------
Actor
Raises
------
TypeError
If `config` is not of type `ImportableActorConfig`.
"""
PyCondition.type(config, ImportableActorConfig, "config")
actor_cls = resolve_path(config.actor_path)
config_cls = resolve_path(config.config_path)
return actor_cls(config=config_cls(**config.config))
class StrategyConfig(AlgoVisionConfig, kw_only=True, frozen=True):
"""
The base model for all trading strategy configurations.
Parameters
----------
strategy_id : str, optional
The unique ID for the strategy. Will become the strategy ID if not None.
order_id_tag : str, optional
The unique order ID tag for the strategy. Must be unique
amongst all running strategies for a particular trader ID.
oms_type : OmsType, optional
The order management system type for the strategy. This will determine
how the `ExecutionEngine` handles position IDs (see docs).
external_order_claims : list[str], optional
The external order claim instrument IDs.
"""
strategy_id: Optional[str] = None
order_id_tag: Optional[str] = None
oms_type: Optional[str] = None
external_order_claims: Optional[list[str]] = None
class ImportableStrategyConfig(AlgoVisionConfig, frozen=True):
"""
Configuration for a trading strategy instance.
Parameters
----------
strategy_path : str
The fully qualified name of the strategy class.
config_path : str
The fully qualified name of the config class.
config : dict[str, Any]
The strategy configuration.
"""
strategy_path: str
config_path: str
config: dict[str, Any]
class StrategyFactory:
"""
Provides strategy creation from importable configurations.
"""
@staticmethod
def create(config: ImportableStrategyConfig):
"""
Create a trading strategy from the given configuration.
Parameters
----------
config : ImportableStrategyConfig
The configuration for the building step.
Returns
-------
Strategy
Raises
------
TypeError
If `config` is not of type `ImportableStrategyConfig`.
"""
PyCondition.type(config, ImportableStrategyConfig, "config")
strategy_cls = resolve_path(config.strategy_path)
config_cls = resolve_path(config.config_path)
return strategy_cls(config=config_cls(**config.config))
class ExecAlgorithmConfig(AlgoVisionConfig, kw_only=True, frozen=True):
"""
The base model for all execution algorithm configurations.
Parameters
----------
exec_algorithm_id : str, optional
The unique ID for the execution algorithm.
If not ``None`` then will become the execution algorithm ID.
"""
exec_algorithm_id: Optional[str] = None
class ImportableExecAlgorithmConfig(AlgoVisionConfig, frozen=True):
"""
Configuration for an execution algorithm instance.
Parameters
----------
exec_algorithm_path : str
The fully qualified name of the execution algorithm class.
config_path : str
The fully qualified name of the config class.
config : dict[str, Any]
The execution algorithm configuration.
"""
exec_algorithm_path: str
config_path: str
config: dict[str, Any]
class ExecAlgorithmFactory:
"""
Provides execution algorithm creation from importable configurations.
"""
@staticmethod
def create(config: ImportableExecAlgorithmConfig):
"""
Create an execution algorithm from the given configuration.
Parameters
----------
config : ImportableExecAlgorithmConfig
The configuration for the building step.
Returns
-------
ExecAlgorithm
Raises
------
TypeError
If `config` is not of type `ImportableExecAlgorithmConfig`.
"""
PyCondition.type(config, ImportableExecAlgorithmConfig, "config")
exec_algorithm_cls = resolve_path(config.exec_algorithm_path)
config_cls = resolve_path(config.config_path)
return exec_algorithm_cls(config=config_cls(**config.config))
class LoggingConfig(AlgoVisionConfig, frozen=True):
"""
Configuration for standard output and file logging for a ``NautilusKernel`` instance.
Parameters
----------
log_level : str, default "INFO"
The minimum log level to write to stdout.
Will always write ERROR level logs to stderr (unless `bypass_logging` is True).
log_level_file : str, optional
The minimum log level to write to a log file.
If ``None`` then no file logging will occur.
log_directory : str, optional
The path to the log file directory.
If ``None`` then will write to the current working directory.
log_file_name : str, optional
The custom log file name (will use a '.log' suffix for plain text or '.json' for JSON).
This will override automatic naming, and no daily file rotation will occur.
log_file_format : str { 'JSON' }, optional
The log file format. If ``None`` (default) then will log in plain text.
log_component_levels : dict[str, LogLevel]
The additional per component log level filters, where keys are component
IDs (e.g. actor/strategy IDs) and values are log levels.
bypass_logging : bool, default False
If all logging should be bypassed.
"""
log_level: str = "INFO"
log_level_file: Optional[str] = None
log_directory: Optional[str] = None
log_file_name: Optional[str] = None
log_file_format: Optional[str] = None
log_component_levels: Optional[dict[str, str]] = None
bypass_logging: bool = False
class NautilusKernelConfig(AlgoVisionConfig, frozen=True):
"""
Configuration for a ``NautilusKernel`` core system instance.
Parameters
----------
environment : Environment { ``BACKTEST``, ``SANDBOX``, ``LIVE`` }
The kernel environment context.
trader_id : str
The trader ID for the kernel (must be a name and ID tag separated by a hyphen).
cache : CacheConfig, optional
The cache configuration.
cache_database : CacheDatabaseConfig, optional
The cache database configuration.
data_engine : DataEngineConfig, optional
The live data engine configuration.
risk_engine : RiskEngineConfig, optional
The live risk engine configuration.
exec_engine : ExecEngineConfig, optional
The live execution engine configuration.
streaming : StreamingConfig, optional
The configuration for streaming to feather files.
catalog : DataCatalogConfig, optional
The data catalog config.
actors : list[ImportableActorConfig]
The actor configurations for the kernel.
strategies : list[ImportableStrategyConfig]
The strategy configurations for the kernel.
load_state : bool, default True
If trading strategy state should be loaded from the database on start.
save_state : bool, default True
If trading strategy state should be saved to the database on stop.
loop_debug : bool, default False
If the asyncio event loop should be in debug mode.
timeout_connection : PositiveFloat (seconds)
The timeout for all clients to connect and initialize.
timeout_reconciliation : PositiveFloat (seconds)
The timeout for execution state to reconcile.
timeout_portfolio : PositiveFloat (seconds)
The timeout for portfolio to initialize margins and unrealized PnLs.
timeout_disconnection : PositiveFloat (seconds)
The timeout for all engine clients to disconnect.
timeout_post_stop : PositiveFloat (seconds)
The timeout after stopping the node to await residual events before final shutdown.
"""
environment: Environment
trader_id: str
instance_id: Optional[str] = None
cache: Optional[CacheConfig] = None
cache_database: Optional[CacheDatabaseConfig] = None
data_engine: Optional[DataEngineConfig] = None
risk_engine: Optional[RiskEngineConfig] = None
exec_engine: Optional[ExecEngineConfig] = None
streaming: Optional[StreamingConfig] = None
catalog: Optional[DataCatalogConfig] = None
actors: list[ImportableActorConfig] = []
strategies: list[ImportableStrategyConfig] = []
exec_algorithms: list[ImportableExecAlgorithmConfig] = []
load_state: bool = False
save_state: bool = False
loop_debug: bool = False
logging: Optional[LoggingConfig] = None
timeout_connection: PositiveFloat = 10.0
timeout_reconciliation: PositiveFloat = 10.0
timeout_portfolio: PositiveFloat = 10.0
timeout_disconnection: PositiveFloat = 10.0
timeout_post_stop: PositiveFloat = 10.0
class ImportableFactoryConfig(AlgoVisionConfig, frozen=True):
"""
Represents an importable (JSON) factory config.
"""
path: str
def create(self):
cls = resolve_path(self.path)
return cls()
class ImportableConfig(AlgoVisionConfig, frozen=True):
"""
Represents an importable configuration (typically live data client or live execution client).
"""
path: str
config: dict = {}
factory: Optional[ImportableFactoryConfig] = None
@staticmethod
def is_importable(data: dict):
return set(data) == {"path", "config"}
def create(self):
assert ":" in self.path, "`path` variable should be of the form `path.to.module:class`"
cls = resolve_path(self.path)
cfg = msgspec.json.encode(self.config)
return msgspec.json.decode(cfg, type=cls)
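# Illustrative usage sketch (added for documentation; not part of the original
# module).
if __name__ == '__main__':
    data = {'path': 'some.module:SomeClass', 'config': {}}
    print(ImportableConfig.is_importable(data))  # True: keys are exactly {'path', 'config'}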
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/config/base.py
|
base.py
|
import logging
import time
import asyncio
from asyncio import Queue, CancelledError
from contextlib import asynccontextmanager, suppress
from typing import List, Union, AsyncIterable
from decimal import Decimal
import atexit
from dataclasses import dataclass
from aiohttp.client_reqrep import ClientResponse
import requests
import websockets
import aiohttp
from aiohttp.typedefs import StrOrURL
# from yapic import json as json_parser
LOG = logging.getLogger('feedhandler')
class Connection:
raw_data_callback = None
async def read(self) -> bytes:
raise NotImplementedError
async def write(self, msg: str):
raise NotImplementedError
class AsyncConnection(Connection):
    conn_count: int = 0
def __init__(self, conn_id: str, authentication=None, subscription=None):
"""
conn_id: str
the unique identifier for the connection
authentication: Callable
function pointer that will be invoked directly before the connection
is attempted. Some connections may need to do authentication at this point.
subscription: dict
optional connection information
"""
AsyncConnection.conn_count += 1
self.id: str = conn_id
self.received: int = 0
self.sent: int = 0
self.last_message = None
self.authentication = authentication
self.subscription = subscription
self.conn: Union[websockets.WebSocketClientProtocol, aiohttp.ClientSession] = None
atexit.register(self.__del__)
def __del__(self):
# best effort clean up. Shutdown should be called on Feed/Exchange classes
# and any user of the Async connection should use a context manager (via connect)
# or call close manually. If not, we *might* be able to clean up the connection on exit
try:
if self.is_open:
asyncio.ensure_future(self.close())
except (RuntimeError, RuntimeWarning):
# no event loop, ignore error
pass
@property
def uuid(self):
return self.id
@asynccontextmanager
async def connect(self):
await self._open()
try:
yield self
finally:
await self.close()
async def _open(self):
raise NotImplementedError
@property
def is_open(self) -> bool:
raise NotImplementedError
async def close(self):
if self.is_open:
conn = self.conn
self.conn = None
await conn.close()
LOG.info('%s: closed connection %r', self.id, conn.__class__.__name__)
class WSAsyncConn(AsyncConnection):
def __init__(self, address: str, conn_id: str, authentication=None, subscription=None, **kwargs):
"""
address: str
the websocket address to connect to
conn_id: str
the identifier of this connection
kwargs:
passed into the websocket connection.
"""
if not address.startswith("wss://"):
raise ValueError(f'Invalid address, must be a wss address. Provided address is: {address!r}')
self.address = address
super().__init__(f'{conn_id}.ws.{self.conn_count}', authentication=authentication, subscription=subscription)
self.ws_kwargs = kwargs
@property
def is_open(self) -> bool:
        return self.conn is not None and not self.conn.closed
async def _open(self):
if self.is_open:
LOG.warning('%s: websocket already open', self.id)
else:
LOG.debug('%s: connecting to %s', self.id, self.address)
if self.raw_data_callback:
await self.raw_data_callback(None, time.time(), self.id, connect=self.address)
if self.authentication:
self.address, self.ws_kwargs = await self.authentication(self.address, self.ws_kwargs)
self.conn = await websockets.connect(self.address, **self.ws_kwargs)
self.sent = 0
self.received = 0
self.last_message = None
async def read(self) -> AsyncIterable:
        if not self.is_open:
            LOG.error('%s: connection closed in read()', self.id)
            raise ConnectionError(f'{self.id}: cannot read, connection is closed')
if self.raw_data_callback:
async for data in self.conn:
self.received += 1
self.last_message = time.time()
await self.raw_data_callback(data, self.last_message, self.id)
yield data
else:
async for data in self.conn:
self.received += 1
self.last_message = time.time()
yield data
async def write(self, data: str):
        if not self.is_open:
            raise ConnectionError(f'{self.id}: cannot write, connection is closed')
if self.raw_data_callback:
await self.raw_data_callback(data, time.time(), self.id, send=self.address)
await self.conn.send(data)
self.sent += 1
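
# --- Hedged usage sketch (illustration only) ---------------------------------
# How a caller is expected to drive WSAsyncConn: open it via the connect()
# async context manager, write a subscription, then iterate read(). The URL
# and payload below are invented placeholders, not a real endpoint.
async def _demo_ws_usage() -> None:
    conn = WSAsyncConn('wss://example.invalid/ws', 'demo')
    async with conn.connect():
        await conn.write('{"op": "subscribe"}')
        async for raw in conn.read():
            LOG.info('received %d bytes', len(raw))
            break  # one message is enough for the demo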
@dataclass
class WebsocketEndpoint:
address: str
    sandbox: Optional[str] = None
    instrument_filter: Optional[tuple] = None
    channel_filter: Optional[tuple] = None
    limit: Optional[int] = None
    options: Optional[dict] = None
    authentication: Optional[bool] = None
def __post_init__(self):
defaults = {'ping_interval': 10, 'ping_timeout': None, 'max_size': 2**23, 'max_queue': None, 'read_limit': 2**18}
if self.options:
defaults.update(self.options)
self.options = defaults
def subscription_filter(self, sub: dict) -> dict:
if not self.instrument_filter and not self.channel_filter:
return sub
ret = {}
for chan, syms in sub.items():
if self.channel_filter and chan not in self.channel_filter:
continue
ret[chan] = []
if not self.instrument_filter:
ret[chan].extend(sub[chan])
else:
if self.instrument_filter[0] == 'TYPE':
# ret[chan].extend([s for s in syms if str_to_symbol(s).type in self.instrument_filter[1]])
ret[chan].extend([s for s in syms if s.type in self.instrument_filter[1]])
elif self.instrument_filter[0] == 'QUOTE':
# ret[chan].extend([s for s in syms if str_to_symbol(s).quote in self.instrument_filter[1]])
ret[chan].extend([s for s in syms if s.quote in self.instrument_filter[1]])
else:
raise ValueError('Invalid instrument filter type specified')
return ret
def get_address(self, sandbox=False):
if sandbox and self.sandbox:
return self.sandbox
return self.address
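
# --- Hedged sketch: channel/instrument filtering ------------------------------
# subscription_filter() drops channels missing from channel_filter and, when an
# instrument filter such as ('QUOTE', ('USD',)) is set, keeps only the symbols
# whose .quote (or .type) matches. The channel names below are invented:
def _demo_subscription_filter() -> dict:
    ep = WebsocketEndpoint('wss://example.invalid', channel_filter=('trades',))
    sub = {'trades': ['BTC-USD'], 'l2_book': ['BTC-USD']}
    return ep.subscription_filter(sub)  # -> {'trades': ['BTC-USD']}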
@dataclass
class Routes:
instruments: Union[str, list]
    currencies: Optional[str] = None
    funding: Optional[str] = None
    open_interest: Optional[str] = None
    liquidations: Optional[str] = None
    stats: Optional[str] = None
    authentication: Optional[str] = None
    l2book: Optional[str] = None
    l3book: Optional[str] = None
@dataclass
class RestEndpoint:
address: str
    sandbox: Optional[str] = None
    instrument_filter: Optional[tuple] = None
    routes: Optional[Routes] = None
def route(self, ep, sandbox=False):
        endpoint = getattr(self.routes, ep)
api = self.sandbox if sandbox and self.sandbox else self.address
return api + endpoint if isinstance(endpoint, str) else [api + e for e in endpoint]
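
# --- Hedged sketch: resolving a REST route ------------------------------------
# route() joins the (sandbox-aware) base address with the named entry in
# Routes; list-valued routes yield a list of full URLs. Values are invented:
def _demo_rest_route() -> str:
    ep = RestEndpoint('https://api.example.invalid',
                      routes=Routes(instruments='/v1/instruments'))
    return ep.route('instruments')  # -> 'https://api.example.invalid/v1/instruments'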
==== AlgoVision-Quant-Research :: /AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/broker/connection.py ====
from __future__ import (absolute_import, division, print_function,
unicode_literals)
# import functools
import collections
from copy import copy, deepcopy
from datetime import date, datetime, timedelta
import inspect
import itertools
import random
import threading
import time
from backtrader import TimeFrame, Position
from backtrader.metabase import MetaParams
from backtrader.utils.py3 import bytes, bstr, queue, with_metaclass, long
from backtrader.utils import AutoDict, UTC
import bisect
bytes = bstr # py2/3 need for ibpy
from ibapi.client import EClient
from ibapi.wrapper import EWrapper
from ibapi.contract import Contract
from ibapi.ticktype import TickTypeEnum
import logging
logger = logging.getLogger(__name__)
ENABLE_DEBUG = True
def _ts2dt(tstamp=None):
# Transforms a RTVolume timestamp to a datetime object
if not tstamp:
return datetime.utcnow()
sec, msec = divmod(long(tstamp), 1000)
usec = msec * 1000
return datetime.utcfromtimestamp(sec).replace(microsecond=usec)
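
# Hedged worked example: a millisecond epoch splits into seconds plus
# microseconds, e.g. 1_700_000_000_123 ms -> microsecond == 123000.
def _demo_ts2dt() -> None:
    assert _ts2dt(1_700_000_000_123).microsecond == 123000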
class ErrorMsg(object):
def __init__(self, reqId, errorCode, errorString, advancedOrderRejectJson):
self.vars = vars()
del self.vars['self']
self.reqId = reqId
self.errorCode = errorCode
self.errorString = errorString
self.advancedOrderRejectJson = advancedOrderRejectJson
def __str__(self):
return f'{self.vars}'
class OpenOrderMsg(object):
def __init__(self, orderId, contract, order, orderState):
self.vars = vars()
del self.vars['self']
self.orderId = orderId
self.contract = contract
self.order = order
self.orderState = orderState
def __str__(self):
return f'{self.vars}'
class OrderStatusMsg(object):
def __init__(self, orderId, status, filled,
remaining, avgFillPrice, permId,
parentId, lastFillPrice, clientId,
whyHeld, mktCapPrice):
self.vars = vars()
self.orderId = orderId
self.status = status
self.filled = filled
self.remaining = remaining
self.avgFillPrice = avgFillPrice
self.permId = permId
self.parentId = parentId
self.lastFillPrice = lastFillPrice
self.clientId = clientId
self.whyHeld = whyHeld
self.mktCapPrice = mktCapPrice
def __str__(self):
return f'{self.vars}'
class RTVolume(object):
'''Parses a tickString tickType 48 (RTVolume) event from the IB API into its
constituent fields
Supports using a "price" to simulate an RTVolume from a tickPrice event
'''
_fields = [
('price', float),
('size', float),
('datetime', _ts2dt),
('volume', float),
('vwap', float),
('single', bool)
]
def __init__(self, rtvol='', price=None, tmoffset=None):
self.vars = vars()
# Use a provided string or simulate a list of empty tokens
tokens = iter(rtvol.split(';'))
# Put the tokens as attributes using the corresponding func
for name, func in self._fields:
setattr(self, name, func(next(tokens)) if rtvol else func())
# If price was provided use it
if price is not None:
self.price = price
if tmoffset is not None:
self.datetime += tmoffset
def __str__(self):
return f'{self.vars}'
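
# Hedged sketch: parsing a tickType 48 payload. The field order follows
# _fields above: price;size;timestamp_ms;volume;vwap;single. Values invented:
def _demo_rtvolume() -> RTVolume:
    rtv = RTVolume('701.28;1;1348075471534;67854;701.46;True')
    assert rtv.price == 701.28 and rtv.single is True
    return rtv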
class RTPrice(object):
'''Set price from a tickPrice
'''
def __init__(self, price, tmoffset=None):
self.vars = vars()
# No size for tickPrice
self.size = None
# Set the price
self.price = price
        # Set datetime to when we received it
self.datetime = datetime.now()
if tmoffset is not None:
self.datetime += tmoffset
def __str__(self):
return f'{self.vars}'
class RTSize(object):
'''Set size from a tickSize
'''
def __init__(self, size, tmoffset=None):
self.vars = vars()
        # No price for tickSize
self.price = None
# Set the size
self.size = size
        # Set datetime to when we received it
self.datetime = datetime.now()
if tmoffset is not None:
self.datetime += tmoffset
def __str__(self):
return f'{self.vars}'
class RTBar(object):
'''Set realtimeBar object
'''
def __init__(self, reqId, time, open_, high, low, close, volume, wap, count):
self.vars = vars()
self.reqId = reqId
self.time = time
self.open = open_
self.high = high
self.low = low
self.close = close
self.volume = volume
self.wap = wap
self.count = count
def __str__(self):
return f'{self.vars}'
class HistBar(object):
'''Set historicalBar object
'''
def __init__(self, reqId, bar):
self.vars = vars()
self.reqId = reqId
self.date = bar.date
self.open = bar.open
self.high = bar.high
self.low = bar.low
self.close = bar.close
self.volume = bar.volume
# self.wap = bar.wap
self.count = bar.barCount
def __str__(self):
return f'{self.vars}'
class HistTick(object):
'''Set historicalTick object: 'MIDPOINT', 'BID_ASK', 'TRADES'
'''
def __init__(self, tick, dataType):
self.vars = vars()
self.date = datetime.utcfromtimestamp(tick.time)
self.tickType = tick.tickType if hasattr(tick, 'tickType') else int(0)
self.dataType = dataType
if dataType == 'RT_TICK_MIDPOINT':
self.price = tick.price
elif dataType == 'RT_TICK_LAST':
self.price = tick.price
self.size = float(tick.size)
self.unreported = tick.tickAttribLast.unreported
self.pastlimit = tick.tickAttribLast.pastLimit
elif dataType == 'RT_TICK_BID_ASK':
self.bidPrice = tick.priceBid
self.askPrice = tick.priceAsk
self.bidSize = float(tick.sizeBid)
self.askSize = float(tick.sizeAsk)
# self.exchange = tick.exchange
# self.specialconditions = tick.tickAttribLast.specialConditions
def __str__(self):
return f'{self.vars}'
class RTTickLast(object):
'''Set realtimeTick object: 'TRADES'
'''
    def __init__(self, tickType, time, price, size, tickAttribLast, exchange, specialConditions):
self.vars = vars()
self.dataType = "RT_TICK_LAST"
self.datetime = datetime.utcfromtimestamp(time)
# self.tickType = TickTypeEnum.to_str(tickType)
self.tickType = tickType
self.price = price
self.size = float(size)
        self.pastlimit = tickAttribLast.pastLimit
        self.unreported = tickAttribLast.unreported
# self.exchange = exchange
# self.specialConditions = specialConditions
def __str__(self):
return f'{self.vars}'
class RTTickBidAsk(object):
    '''Set realtimeTick object: 'BID_ASK'
'''
def __init__(self, time, bidPrice, askPrice, bidSize, askSize, tickAttribBidAsk):
self.vars = vars()
self.dataType = "RT_TICK_BID_ASK"
self.datetime = datetime.utcfromtimestamp(time)
self.bidPrice = bidPrice
self.askPrice = askPrice
self.bidSize = float(bidSize)
self.askSize = float(askSize)
self.bidPastLow = tickAttribBidAsk.bidPastLow
self.askPastHigh = tickAttribBidAsk.askPastHigh
def __str__(self):
return f'{self.vars}'
class RTTickMidPoint(object):
'''Set realtimeTick object: 'MIDPOINT'
'''
def __init__(self, time, midPoint):
self.vars = vars()
self.dataType = "RT_TICK_MIDPOINT"
self.datetime = datetime.utcfromtimestamp(time)
self.midPoint = midPoint
def __str__(self):
return f'{self.vars}'
class MetaSingleton(MetaParams):
'''Metaclass to make a metaclassed class a singleton'''
def __init__(cls, name, bases, dct):
super(MetaSingleton, cls).__init__(name, bases, dct)
cls._singleton = None
def __call__(cls, *args, **kwargs):
if cls._singleton is None:
cls._singleton = (
super(MetaSingleton, cls).__call__(*args, **kwargs))
return cls._singleton
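
# Hedged sketch: any class built with this metaclass yields one instance per
# process. _DemoSingleton is invented purely to illustrate the behaviour (the
# empty params tuple keeps backtrader's MetaParams machinery happy):
class _DemoSingleton(with_metaclass(MetaSingleton, object)):
    params = ()

def _demo_singleton() -> bool:
    return _DemoSingleton() is _DemoSingleton()  # -> True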
def logibmsg(fn):
def logmsg_decorator(self, *args, **kwargs):
try:
if self._debug:
args_repr = [repr(a) for a in args]
kwargs_repr = [f"{k}={v!r}" for k, v in kwargs.items()]
signature = ", ".join(args_repr + kwargs_repr)
logger.debug(f"Calling {fn.__name__}({signature})")
print(f"Calling {fn.__name__}({signature})")
return fn(self, *args, **kwargs)
except Exception as e:
logger.exception(f"Exception raised in {fn.__name__}. exception: {str(e)}")
raise e
return logmsg_decorator
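
# Hedged sketch: logibmsg wraps callback methods so each invocation is logged
# with its arguments whenever self._debug is truthy. _DemoDebuggable is an
# invented carrier class for illustration:
class _DemoDebuggable:
    _debug = True

    @logibmsg
    def on_event(self, code):
        return code  # the decorator logs "Calling on_event(<code>)" first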
class IBApi(EWrapper, EClient):
def __init__(self, cb, _debug):
EClient.__init__(self, self)
EWrapper.__init__(self)
self.cb = cb
self._debug = _debug
@logibmsg
def currentTime(self, time):
""" Server's current time. This method will receive IB server's system
        time resulting after the invocation of reqCurrentTime. """
self.cb.currentTime(time)
@logibmsg
def updateAccountTime(self, timeStamp):
logger.debug(f"timeStamp: {timeStamp}")
@logibmsg
def nextValidId(self, orderId):
""" Receives next valid order id."""
logger.debug(f"nextValidId: {orderId}")
self.cb.nextValidId(orderId)
@logibmsg
def connectAck(self):
""" callback signifying completion of successful connection """
self.cb.connectAck()
@logibmsg
def connectionClosed(self):
"""This function is called when TWS closes the sockets
connection with the ActiveX control, or when TWS is shut down."""
logger.debug(f"connectionClosed")
self.cb.connectionClosed()
@logibmsg
def managedAccounts(self, accountsList):
"""Receives a comma-separated string with the managed account ids."""
self.cb.managedAccounts(accountsList)
@logibmsg
def accountDownloadEnd(self, accountName):
"""This is called after a batch updateAccountValue() and
updatePortfolio() is sent."""
self.cb.accountDownloadEnd(accountName)
@logibmsg
def updateAccountValue(self, key, val, currency, accountName):
""" This function is called only when ReqAccountUpdates on
        EClientSocket object has been called. """
logger.debug(f"{key}, {val}, {currency}, {accountName}")
self.cb.updateAccountValue(key, val, currency, accountName)
@logibmsg
def updatePortfolio(self, contract, position,
marketPrice, marketValue,
averageCost, unrealizedPNL,
realizedPNL, accountName):
"""This function is called only when reqAccountUpdates on
        EClientSocket object has been called."""
self.cb.updatePortfolio(contract, position,
marketPrice, marketValue,
averageCost, unrealizedPNL,
realizedPNL, accountName)
@logibmsg
def contractDetails(self, reqId, contractDetails):
"""Receives the full contract's definitions. This method will return all
contracts matching the requested via EEClientSocket::reqContractDetails.
For example, one can obtain the whole option chain with it."""
self.cb.contractDetails(reqId, contractDetails)
@logibmsg
def contractDetailsEnd(self, reqId):
"""This function is called once all contract details for a given
request are received. This helps to define the end of an option
chain."""
self.cb.contractDetailsEnd(reqId)
@logibmsg
def openOrder(self, orderId, contract, order, orderState):
"""This function is called to feed in open orders.
orderID: OrderId - The order ID assigned by TWS. Use to cancel or
update TWS order.
contract: Contract - The Contract class attributes describe the contract.
order: Order - The Order class gives the details of the open order.
orderState: OrderState - The orderState class includes attributes Used
for both pre and post trade margin and commission data."""
self.cb.openOrder(OpenOrderMsg(orderId, contract, order, orderState))
@logibmsg
def openOrderEnd(self):
"""This is called at the end of a given request for open orders."""
logger.debug(f"openOrderEnd")
self.cb.openOrderEnd()
@logibmsg
def orderStatus(self, orderId, status, filled,
remaining, avgFillPrice, permId,
parentId, lastFillPrice, clientId,
whyHeld, mktCapPrice):
"""This event is called whenever the status of an order changes. It is
also fired after reconnecting to TWS if the client has any open orders.
orderId: OrderId - The order ID that was specified previously in the
call to placeOrder()
status:str - The order status. Possible values include:
PendingSubmit - indicates that you have transmitted the order, but have not yet received confirmation that it has been accepted by the order destination. NOTE: This order status is not sent by TWS and should be explicitly set by the API developer when an order is submitted.
PendingCancel - indicates that you have sent a request to cancel the order but have not yet received cancel confirmation from the order destination. At this point, your order is not confirmed canceled. You may still receive an execution while your cancellation request is pending. NOTE: This order status is not sent by TWS and should be explicitly set by the API developer when an order is canceled.
PreSubmitted - indicates that a simulated order type has been accepted by the IB system and that this order has yet to be elected. The order is held in the IB system until the election criteria are met. At that time the order is transmitted to the order destination as specified.
Submitted - indicates that your order has been accepted at the order destination and is working.
Cancelled - indicates that the balance of your order has been confirmed canceled by the IB system. This could occur unexpectedly when IB or the destination has rejected your order.
Filled - indicates that the order has been completely filled.
Inactive - indicates that the order has been accepted by the system (simulated orders) or an exchange (native orders) but that currently the order is inactive due to system, exchange or other issues.
filled:int - Specifies the number of shares that have been executed.
For more information about partial fills, see Order Status for Partial Fills.
remaining:int - Specifies the number of shares still outstanding.
avgFillPrice:float - The average price of the shares that have been executed. This parameter is valid only if the filled parameter value is greater than zero. Otherwise, the price parameter will be zero.
permId:int - The TWS id used to identify orders. Remains the same over TWS sessions.
parentId:int - The order ID of the parent order, used for bracket and auto trailing stop orders.
lastFilledPrice:float - The last price of the shares that have been executed. This parameter is valid only if the filled parameter value is greater than zero. Otherwise, the price parameter will be zero.
clientId:int - The ID of the client (or TWS) that placed the order. Note that TWS orders have a fixed clientId and orderId of 0 that distinguishes them from API orders.
whyHeld:str - This field is used to identify an order held when TWS is trying to locate shares for a short sell. The value used to indicate this is 'locate'.
"""
self.cb.orderStatus(OrderStatusMsg(orderId, status, filled,
remaining, avgFillPrice, permId,
parentId, lastFillPrice, clientId,
whyHeld, mktCapPrice))
@logibmsg
def commissionReport(self, commissionReport):
"""The commissionReport() callback is triggered as follows:
- immediately after a trade execution
- by calling reqExecutions()."""
self.cb.commissionReport(commissionReport)
@logibmsg
def error(self, reqId, errorCode, errorString, advancedOrderRejectJson=""):
self.cb.error(ErrorMsg(reqId, errorCode, errorString, advancedOrderRejectJson))
@logibmsg
def position(self, account, contract, pos, avgCost):
"""This event returns real-time positions for all accounts in
response to the reqPositions() method."""
self.cb.position(account, contract, pos, avgCost)
@logibmsg
def positionEnd(self):
"""This is called once all position data for a given request are
received and functions as an end marker for the position() data. """
self.cb.positionEnd()
@logibmsg
def tickPrice(self, reqId, tickType, price, attrib):
"""Market data tick price callback. Handles all price related ticks."""
self.cb.tickPrice(reqId, tickType, price, attrib)
@logibmsg
def tickSize(self, reqId, tickType, size):
"""Market data tick size callback. Handles all size-related ticks."""
self.cb.tickSize(reqId, tickType, size)
@logibmsg
def tickGeneric(self, reqId, tickType, value):
self.cb.tickGeneric(reqId, tickType, value)
@logibmsg
def realtimeBar(self, reqId, time, open_, high, low, close, volume, wap, count):
self.cb.realtimeBar(RTBar(reqId, time, open_, high, low, close, float(volume), wap, count))
@logibmsg
def historicalData(self, reqId, bar):
self.cb.historicalData(HistBar(reqId, bar))
@logibmsg
def historicalDataUpdate(self, reqId, bar):
'''Not implemented'''
pass
@logibmsg
def historicalDataEnd(self, reqId, start, end):
""" Marks the ending of the historical bars reception. """
self.cb.historicalDataEnd(reqId, start, end)
@logibmsg
def execDetails(self, reqId, contract, execution):
"""This event is fired when the reqExecutions() functions is
invoked, or when an order is filled. """
self.cb.execDetails(reqId, contract, execution)
@logibmsg
def execDetailsEnd(self, reqId):
"""This function is called once all executions have been sent to
a client in response to reqExecutions()."""
pass
@logibmsg
def historicalTicks(self, reqId, ticks, done):
"""For whatToShow=MIDPOINT
"""
for tick in ticks:
self.cb.historicalTicks(reqId, HistTick(tick, 'RT_TICK_MIDPOINT'))
@logibmsg
def historicalTicksBidAsk(self, reqId, ticks, done):
"""returns historical tick data when whatToShow=BID_ASK"""
for tick in ticks:
self.cb.historicalTicks(reqId, HistTick(tick, 'RT_TICK_BID_ASK'))
@logibmsg
def historicalTicksLast(self, reqId, ticks, done):
"""returns tick-by-tick data for tickType = "Last" or "AllLast" """
for tick in ticks:
self.cb.historicalTicks(reqId, HistTick(tick, 'RT_TICK_LAST'))
@logibmsg
    def tickByTickAllLast(self, reqId, tickType, time, price, size, tickAttribLast, exchange, specialConditions):
        """returns tick-by-tick data for tickType = "Last" or "AllLast" """
        self.cb.tickByTickAllLast(reqId, tickType, time, price, size, tickAttribLast, exchange, specialConditions)
@logibmsg
def tickByTickBidAsk(self, reqId, time, bidPrice, askPrice, bidSize, askSize, tickAttribBidAsk):
"""returns tick-by-tick data for tickType = "BidAsk" """
self.cb.tickByTickBidAsk(reqId, time, bidPrice, askPrice, bidSize, askSize, tickAttribBidAsk)
@logibmsg
def tickByTickMidPoint(self, reqId, time, midPoint):
"""returns tick-by-tick data for tickType = "MidPoint" """
        self.cb.tickByTickMidPoint(reqId, time, midPoint)
@logibmsg
def tickString(self, reqId, tickType, value):
self.cb.tickString(reqId, tickType, value)
class IBStore(with_metaclass(MetaSingleton, object)):
'''Singleton class wrapping an ibpy ibConnection instance.
The parameters can also be specified in the classes which use this store,
like ``IBData`` and ``IBBroker``
Params:
- ``host`` (default:``127.0.0.1``): where IB TWS or IB Gateway are
      actually running. And although this will usually be the localhost, it
      need not be
- ``port`` (default: ``7496``): port to connect to. The demo system uses
``7497``
- ``clientId`` (default: ``None``): which clientId to use to connect to
TWS.
``None``: generates a random id between 1 and 65535
An ``integer``: will be passed as the value to use.
- ``notifyall`` (default: ``False``)
If ``False`` only ``error`` messages will be sent to the
``notify_store`` methods of ``Cerebro`` and ``Strategy``.
If ``True``, each and every message received from TWS will be notified
- ``_debug`` (default: ``False``)
Print all messages received from TWS as info output
- ``reconnect`` (default: ``3``)
Number of attempts to try to reconnect after the 1st connection attempt
fails
Set it to a ``-1`` value to keep on reconnecting forever
- ``timeout`` (default: ``3.0``)
      Time in seconds between reconnection attempts
- ``timeoffset`` (default: ``True``)
If True, the time obtained from ``reqCurrentTime`` (IB Server time)
will be used to calculate the offset to localtime and this offset will
be used for the price notifications (tickPrice events, for example for
CASH markets) to modify the locally calculated timestamp.
The time offset will propagate to other parts of the ``backtrader``
ecosystem like the **resampling** to align resampling timestamps using
the calculated offset.
- ``timerefresh`` (default: ``60.0``)
Time in seconds: how often the time offset has to be refreshed
- ``indcash`` (default: ``True``)
Manage IND codes as if they were cash for price retrieval
'''
# Set a base for the data requests (historical/realtime) to distinguish the
# id in the error notifications from orders, where the basis (usually
# starting at 1) is set by TWS
REQIDBASE = 0x01000000
BrokerCls = None # broker class will autoregister
DataCls = None # data class will auto register
params = (
('host', '127.0.0.1'),
('port', 7496),
('clientId', None), # None generates a random clientid 1 -> 2^16
('broker_host', ''),
('broker_request_port', 12345),
('broker_subscribe_port', 12345),
('broker_user_name', ''),
('broker_password', ''),
('notifyall', False),
('_debug', False),
('reconnect', 3), # -1 forever, 0 No, > 0 number of retries
('timeout', 3.0), # timeout between reconnections
('timeoffset', True), # Use offset to server for timestamps if needed
('timerefresh', 60.0), # How often to refresh the timeoffset
('indcash', True), # Treat IND codes as CASH elements
)
@classmethod
def getdata(cls, *args, **kwargs):
'''Returns ``DataCls`` with args, kwargs'''
return cls.DataCls(*args, **kwargs)
@classmethod
def getbroker(cls, *args, **kwargs):
'''Returns broker with *args, **kwargs from registered ``BrokerCls``'''
return cls.BrokerCls(*args, **kwargs)
def __init__(self):
super(IBStore, self).__init__()
self._lock_q = threading.Lock() # sync access to _tickerId/Queues
self._lock_accupd = threading.Lock() # sync account updates
self._lock_pos = threading.Lock() # sync account updates
self._lock_notif = threading.Lock() # sync access to notif queue
self._updacclock = threading.Lock() # sync account updates
# Account list received
self._event_managed_accounts = threading.Event()
self._event_accdownload = threading.Event()
self.dontreconnect = False # for non-recoverable connect errors
self._env = None # reference to cerebro for general notifications
self.broker = None # broker instance
self.datas = list() # datas that have registered over start
self.ccount = 0 # requests to start (from cerebro or datas)
self._lock_tmoffset = threading.Lock()
self.tmoffset = timedelta() # to control time difference with server
# Structures to hold datas requests
self.qs = collections.OrderedDict() # key: tickerId -> queues
self.ts = collections.OrderedDict() # key: queue -> tickerId
self.iscash = dict() # tickerIds from cash products (for ex: EUR.JPY)
self.histexreq = dict() # holds segmented historical requests
self.histfmt = dict() # holds datetimeformat for request
self.histsend = dict() # holds sessionend (data time) for request
self.histtz = dict() # holds sessionend (data time) for request
self.acc_cash = AutoDict() # current total cash per account
self.acc_value = AutoDict() # current total value per account
self.acc_upds = AutoDict() # current account valueinfos per account
self.port_update = False # indicate whether to signal to broker
self.positions = collections.defaultdict(Position) # actual positions
self._tickerId = itertools.count(self.REQIDBASE) # unique tickerIds
self.orderid = None # next possible orderid (will be itertools.count)
self.cdetails = collections.defaultdict(list) # hold cdetails requests
self.managed_accounts = list() # received via managedAccounts
self.notifs = queue.Queue() # store notifications for cerebro
# Use the provided clientId or a random one
if self.p.clientId is None:
self.clientId = random.randint(1, pow(2, 16) - 1)
else:
self.clientId = self.p.clientId
self._debug = self.p._debug
# ibpy connection object
try:
self.conn = IBApi(self, self._debug)
self.conn.connect(self.p.host, self.p.port, self.clientId)
self.apiThread = threading.Thread(target=self.conn.run, daemon=True)
self.apiThread.start()
except Exception as e:
print(f"TWS Failed to connect: {e}")
# This utility key function transforms a barsize into a:
# (Timeframe, Compression) tuple which can be sorted
def keyfn(x):
n, t = x.split()
tf, comp = self._sizes[t]
return (tf, int(n) * comp)
# This utility key function transforms a duration into a:
# (Timeframe, Compression) tuple which can be sorted
def key2fn(x):
n, d = x.split()
tf = self._dur2tf[d]
return (tf, int(n))
# Generate a table of reverse durations
self.revdur = collections.defaultdict(list)
# The table (dict) is a ONE to MANY relation of
# duration -> barsizes
# Here it is reversed to get a ONE to MANY relation of
# barsize -> durations
for duration, barsizes in self._durations.items():
for barsize in barsizes:
self.revdur[keyfn(barsize)].append(duration)
# Once managed, sort the durations according to real duration and not
# to the text form using the utility key above
for barsize in self.revdur:
self.revdur[barsize].sort(key=key2fn)
def start(self, data=None, broker=None):
logger.info(f"START data: {data} broker: {broker}")
self.reconnect(fromstart=True) # reconnect should be an invariant
# Datas require some processing to kickstart data reception
if data is None and broker is None:
self.cash = None
return
if data is not None:
self._env = data._env
# For datas simulate a queue with None to kickstart co
self.datas.append(data)
# if connection fails, get a fake registration that will force the
# datas to try to reconnect or else bail out
return self.getTickerQueue(start=True)
elif broker is not None:
self.broker = broker
def stop(self):
try:
self.conn.disconnect() # disconnect should be an invariant
except AttributeError:
pass # conn may have never been connected and lack "disconnect"
# Unblock any calls set on these events
self._event_managed_accounts.set()
self._event_accdownload.set()
# @logibmsg
def connected(self):
# The isConnected method is available through __getattr__ indirections
# and may not be present, which indicates that no connection has been
# made because the subattribute sender has not yet been created, hence
# the check for the AttributeError exception
try:
return self.conn.isConnected()
except AttributeError:
pass
return False # non-connected (including non-initialized)
# @logibmsg
def reconnect(self, fromstart=False, resub=False):
# This method must be an invariant in that it can be called several
        # times from the same source and must be consistent. An example would
# be 5 datas which are being received simultaneously and all request a
# reconnect
# Policy:
# - if dontreconnect has been set, no option to connect is possible
# - check connection and use the absence of isConnected as signal of
# first ever connection (add 1 to retries too)
# - Calculate the retries (forever or not)
        # - Try to connect
# - If achieved and fromstart is false, the datas will be
# re-kickstarted to recreate the subscription
firstconnect = False
try:
if self.conn.isConnected():
if resub:
self.startdatas()
return True # nothing to do
except AttributeError:
# Not connected, several __getattr__ indirections to
# self.conn.sender.client.isConnected
firstconnect = True
if self.dontreconnect:
return False
# This is only invoked from the main thread by datas and therefore no
# lock is needed to control synchronicity to it
retries = self.p.reconnect
if retries >= 0:
retries += firstconnect
while retries < 0 or retries:
logger.debug(f"Retries: {retries}")
if not firstconnect:
logger.debug(f"Reconnect in {self.p.timeout} secs")
time.sleep(self.p.timeout)
firstconnect = False
try:
logger.debug("Connect (host={self.p.host}, port={self.p.port}, clientId={self.clientId})")
if self.conn.connect(self.p.host, self.p.port, self.clientId):
if not fromstart or resub:
self.startdatas()
return True # connection successful
except Exception as e:
logger.exception(f"Failed to Connect {e}")
return False
if retries > 0:
retries -= 1
self.dontreconnect = True
return False # connection/reconnection failed
def startdatas(self):
        # kickstart datas, not returning until all of them have been done
ts = list()
for data in self.datas:
t = threading.Thread(target=data.reqdata)
t.start()
ts.append(t)
for t in ts:
t.join()
@logibmsg
def stopdatas(self):
# stop subs and force datas out of the loop (in LIFO order)
logger.debug(f"Stopping datas")
qs = list(self.qs.values())
ts = list()
for data in self.datas:
t = threading.Thread(target=data.canceldata)
t.start()
ts.append(t)
for t in ts:
t.join()
for q in reversed(qs): # datamaster the last one to get a None
q.put(None)
def get_notifications(self):
'''Return the pending "store" notifications'''
# The background thread could keep on adding notifications. The None
# mark allows to identify which is the last notification to deliver
self.notifs.put(None) # put a mark
notifs = list()
while True:
notif = self.notifs.get()
if notif is None: # mark is reached
break
notifs.append(notif)
return notifs
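    # Hedged note: the None put above acts as a drain marker. Background
    # threads may keep enqueuing while we drain; anything added after the
    # marker is simply delivered by the next get_notifications() call.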
def error(self, msg):
# 100-199 Order/Data/Historical related
# 200-203 tickerId and Order Related
# 300-399 A mix of things: orders, connectivity, tickers, misc errors
# 400-449 Seem order related again
# 500-531 Connectivity/Communication Errors
# 10000-100027 Mix of special orders/routing
        # 1100-1102 TWS connectivity to the outside
# 1300- Socket dropped in client-TWS communication
# 2100-2110 Informative about Data Farm status (id=-1)
# All errors are logged to the environment (cerebro), because many
# errors in Interactive Brokers are actually informational and many may
# actually be of interest to the user
if msg.reqId > 0:
logger.error(f"{msg}")
print(f"Error: {msg}")
else:
logger.debug(f"{msg}")
if msg.reqId == -1 and msg.errorCode == 502:
print(msg.errorString)
if not self.p.notifyall:
self.notifs.put((msg, tuple(vars(msg).values()), dict(vars(msg).items())))
# Manage those events which have to do with connection
if msg.errorCode is None:
            # Usually received as an error in connection or just before disconnecting
pass
elif msg.errorCode in [200, 203, 162, 320, 321, 322]:
# cdetails 200 security not found, notify over right queue
# cdetails 203 security not allowed for acct
try:
q = self.qs[msg.reqId]
except KeyError:
                pass  # should not happen, but it can
            else:
                logger.warning(f"Cancel data queue for {msg.reqId}")
self.cancelQueue(q, True)
elif msg.errorCode in [354, 420]:
# 354 no subscription, 420 no real-time bar for contract
# the calling data to let the data know ... it cannot resub
try:
q = self.qs[msg.reqId]
except KeyError:
                pass  # should not happen, but it can
            else:
                q.put(-msg.errorCode)
                logger.warning(f"Cancel data queue for {msg.reqId}")
self.cancelQueue(q)
elif msg.errorCode == 10225:
# 10225-Bust event occurred, current subscription is deactivated.
# Please resubscribe real-time bars immediately.
try:
q = self.qs[msg.reqId]
except KeyError:
                pass  # should not happen, but it can
else:
q.put(-msg.errorCode)
elif msg.errorCode == 326: # not recoverable, clientId in use
self.dontreconnect = True
self.conn.disconnect()
self.stopdatas()
elif msg.errorCode == 502:
# Cannot connect to TWS: port, config not open, tws off (504 then)
self.conn.disconnect()
self.stopdatas()
elif msg.errorCode == 504: # Not Connected for data op
# Once for each data
# pass # don't need to manage it
# Connection lost - Notify ... datas will wait on the queue
# with no messages arriving
for q in self.ts: # key: queue -> ticker
q.put(-msg.errorCode)
elif msg.errorCode == 1300:
# TWS has been closed. The port for a new connection is there
# newport = int(msg.errorMsg.split('-')[-1]) # bla bla bla -7496
self.conn.disconnect()
self.stopdatas()
elif msg.errorCode == 1100:
# Connection lost - Notify ... datas will wait on the queue
# with no messages arriving
for q in self.ts: # key: queue -> ticker
q.put(-msg.errorCode)
elif msg.errorCode == 1101:
# Connection restored and tickerIds are gone
for q in self.ts: # key: queue -> ticker
q.put(-msg.errorCode)
elif msg.errorCode == 1102:
# Connection restored and tickerIds maintained
for q in self.ts: # key: queue -> ticker
q.put(-msg.errorCode)
elif msg.errorCode < 500:
            # Given the myriad of errorCodes, start by assuming it is an order
# error and if not, the checks there will let it go
if msg.reqId < self.REQIDBASE:
if self.broker is not None:
self.broker.push_ordererror(msg)
else:
# Cancel the queue if a "data" reqId error is given: sanity
q = self.qs[msg.reqId]
logger.warn(f"Cancel data queue for {msg.reqId}")
self.cancelQueue(q, True)
def connectionClosed(self):
        # Sometimes this comes without a 1300/502 or any other error and will
        # not be seen in error(), hence the need to manage the situation independently
if self.connected():
self.conn.disconnect()
self.stopdatas()
def updateAccountTime(self, timeStamp):
logger.debug(f"timeStamp: {timeStamp}")
def connectAck(self):
logger.debug(f"connectAck")
def managedAccounts(self, accountsList):
# 1st message in the stream
self.managed_accounts = accountsList.split(',')
self._event_managed_accounts.set()
# Request time to avoid synchronization issues
self.reqCurrentTime()
@logibmsg
def reqCurrentTime(self):
self.conn.reqCurrentTime()
def currentTime(self, time):
if not self.p.timeoffset: # only if requested ... apply timeoffset
return
curtime = datetime.fromtimestamp(float(time))
with self._lock_tmoffset:
self.tmoffset = curtime - datetime.now()
threading.Timer(self.p.timerefresh, self.reqCurrentTime).start()
def timeoffset(self):
with self._lock_tmoffset:
return self.tmoffset
def nextTickerId(self):
# Get the next ticker using next on the itertools.count
return next(self._tickerId)
def nextValidId(self, orderId):
# Create a counter from the TWS notified value to apply to orders
self.orderid = itertools.count(orderId)
def nextOrderId(self):
# Get the next ticker using next on the itertools.count made with the
# notified value from TWS
return next(self.orderid)
def reuseQueue(self, tickerId):
'''Reuses queue for tickerId, returning the new tickerId and q'''
with self._lock_q:
# Invalidate tickerId in qs (where it is a key)
q = self.qs.pop(tickerId, None) # invalidate old
iscash = self.iscash.pop(tickerId, None)
# Update ts: q -> ticker
tickerId = self.nextTickerId() # get new tickerId
self.ts[q] = tickerId # Update ts: q -> tickerId
self.qs[tickerId] = q # Update qs: tickerId -> q
self.iscash[tickerId] = iscash
return tickerId, q
def getTickerQueue(self, start=False):
'''Creates ticker/Queue for data delivery to a data feed'''
q = queue.Queue()
if start:
q.put(None)
return q
with self._lock_q:
tickerId = self.nextTickerId()
self.qs[tickerId] = q # can be managed from other thread
self.ts[q] = tickerId
self.iscash[tickerId] = False
return tickerId, q
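    # Hedged note: qs (tickerId -> Queue) and ts (Queue -> tickerId) are
    # inverse registries; both are only mutated under _lock_q so that
    # reuseQueue() and cancelQueue() always observe a consistent pair.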
def cancelQueue(self, q, sendnone=False):
'''Cancels a Queue for data delivery'''
# pop ts (tickers) and with the result qs (queues)
tickerId = self.ts.pop(q, None)
self.qs.pop(tickerId, None)
self.iscash.pop(tickerId, None)
if sendnone:
q.put(None)
def validQueue(self, q):
'''Returns (bool) if a queue is still valid'''
return q in self.ts # queue -> ticker
def getContractDetails(self, contract, maxcount=None):
cds = list()
q = self.reqContractDetails(contract)
while True:
msg = q.get()
if msg is None:
break
cds.append(msg)
if not cds or (maxcount and len(cds) > maxcount):
err = 'Ambiguous contract: none/multiple answers received'
self.notifs.put((err, cds, {}))
return None
return cds
def reqContractDetails(self, contract):
# get a ticker/queue for identification/data delivery
tickerId, q = self.getTickerQueue()
self.conn.reqContractDetails(tickerId, contract)
return q
def contractDetailsEnd(self, reqId):
'''Signal end of contractdetails'''
logger.debug(f"Cancel data queue tickerId: {reqId} Q: {self.qs[reqId]}")
self.cancelQueue(self.qs[reqId], True)
def contractDetails(self, reqId, contractDetails):
'''Receive answer and pass it to the queue'''
self.qs[reqId].put(contractDetails)
@logibmsg
def reqHistoricalDataEx(self, contract, enddate, begindate,
timeframe, compression,
what=None, useRTH=False, tz='', sessionend=None,
tickerId=None):
'''
Extension of the raw reqHistoricalData proxy, which takes two dates
rather than a duration, barsize and date
It uses the IB published valid duration/barsizes to make a mapping and
spread a historical request over several historical requests if needed
'''
# Keep a copy for error reporting purposes
kwargs = locals().copy()
kwargs.pop('self', None) # remove self, no need to report it
if timeframe < TimeFrame.Seconds:
# Ticks are not supported
return self.getTickerQueue(start=True)
if enddate is None:
enddate = datetime.now()
if begindate is None:
duration = self.getmaxduration(timeframe, compression)
if duration is None:
err = ('No duration for historical data request for '
                       'timeframe/compression')
self.notifs.put((err, (), kwargs))
return self.getTickerQueue(start=True)
barsize = self.tfcomp_to_size(timeframe, compression)
if barsize is None:
err = ('No supported barsize for historical data request for '
                       'timeframe/compression')
self.notifs.put((err, (), kwargs))
return self.getTickerQueue(start=True)
return self.reqHistoricalData(contract=contract, enddate=enddate,
duration=duration, barsize=barsize,
what=what, useRTH=useRTH, tz=tz,
sessionend=sessionend)
# Check if the requested timeframe/compression is supported by IB
durations = self.getdurations(timeframe, compression)
if not durations: # return a queue and put a None in it
return self.getTickerQueue(start=True)
# Get or reuse a queue
if tickerId is None:
tickerId, q = self.getTickerQueue()
logger.debug(f"Get tickerId: {tickerId} Q: {q}")
else:
tickerId, q = self.reuseQueue(tickerId) # reuse q for old tickerId
logger.debug(f"Reuse tickerId: {tickerId} Q: {q}")
# Get the best possible duration to reduce number of requests
duration = None
for dur in durations:
intdate = self.dt_plus_duration(begindate, dur)
if intdate >= enddate:
intdate = enddate
duration = dur # begin -> end fits in single request
break
if duration is None: # no duration large enough to fit the request
duration = durations[-1]
# Store the calculated data
self.histexreq[tickerId] = dict(
contract=contract, enddate=enddate, begindate=intdate,
timeframe=timeframe, compression=compression,
what=what, useRTH=useRTH, tz=tz, sessionend=sessionend)
barsize = self.tfcomp_to_size(timeframe, compression)
self.histfmt[tickerId] = timeframe >= TimeFrame.Days
self.histsend[tickerId] = sessionend
self.histtz[tickerId] = tz
if contract.secType in ['CASH', 'CFD']:
self.iscash[tickerId] = 1 # msg.field code
if not what:
what = 'BID' # default for cash unless otherwise specified
elif contract.secType in ['IND'] and self.p.indcash:
self.iscash[tickerId] = 4 # msg.field code
what = what or 'TRADES'
self.conn.reqHistoricalData(
tickerId,
contract,
# bytes(intdate.strftime('%Y%m%d %H:%M:%S') + ' GMT'),
bytes(intdate.strftime('%Y%m%d-%H:%M:%S')),
bytes(duration),
bytes(barsize),
bytes(what),
int(useRTH),
2, # dateformat 1 for string, 2 for unix time in seconds
False,
[])
return q
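    # Hedged note: the call above fetches begindate -> intdate; the remaining
    # intdate -> enddate parameters are parked in histexreq[tickerId], and
    # historicalDataEnd() pops them to re-issue reqHistoricalDataEx() (reusing
    # the queue via reuseQueue()), so one logical request may span several IB
    # historical calls.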
def reqHistoricalData(self, contract, enddate, duration, barsize,
what=None, useRTH=False, tz='', sessionend=None):
'''Proxy to reqHistorical Data'''
# get a ticker/queue for identification/data delivery
tickerId, q = self.getTickerQueue()
if contract.secType in ['CASH', 'CFD']:
self.iscash[tickerId] = True
if not what:
what = 'BID' # TRADES doesn't work
elif what == 'ASK':
self.iscash[tickerId] = 2
else:
what = what or 'TRADES'
# split barsize "x time", look in sizes for (tf, comp) get tf
tframe = self._sizes[barsize.split()[1]][0]
self.histfmt[tickerId] = tframe >= TimeFrame.Days
self.histsend[tickerId] = sessionend
self.histtz[tickerId] = tz
self.conn.reqHistoricalData(
tickerId,
contract,
# bytes(enddate.strftime('%Y%m%d %H:%M:%S') + ' GMT'),
bytes(enddate.strftime('%Y%m%d-%H:%M:%S')),
bytes(duration),
bytes(barsize),
bytes(what),
int(useRTH),
2,
False,
[])
return q
def reqHistoricalTicksEx(self, contract, enddate=None, begindate=None,
what=None, useRTH=False, tz='',
tickerId=None):
'''
Extension of the raw reqHistoricalData proxy, which takes two dates
rather than a duration, barsize and date
It uses the IB published valid duration/barsizes to make a mapping and
spread a historical request over several historical requests if needed
'''
# Keep a copy for error reporting purposes
kwargs = locals().copy()
kwargs.pop('self', None) # remove self, no need to report it
if enddate and begindate:
            err = 'Only begindate OR enddate can be specified, not both'
self.notifs.put((err, (), kwargs))
return self.getTickerQueue(start=True)
if enddate is None and begindate is None:
today = datetime.utcnow().date()
begindate = datetime(today.year, today.month, today.day)
# begindate = datetime.now()
logger.debug(f"begin: {begindate} end: {enddate}")
# Get or reuse a queue
if tickerId is None:
tickerId, q = self.getTickerQueue()
logger.debug(f"Get tickerId: {tickerId} Q: {q}")
else:
tickerId, q = self.reuseQueue(tickerId) # reuse q for old tickerId
logger.debug(f"Reuse tickerId: {tickerId} Q: {q}")
if contract.secType in ['CASH', 'CFD']:
self.iscash[tickerId] = 1 # msg.field code
if not what:
what = 'BID' # default for cash unless otherwise specified
elif contract.secType in ['IND'] and self.p.indcash:
self.iscash[tickerId] = 4 # msg.field code
what = what or 'TRADES'
self.conn.reqHistoricalTicks(
tickerId,
contract,
# bytes(begindate.strftime('%Y%m%d %H:%M:%S') + ' GMT') if begindate else '',
# bytes(enddate.strftime('%Y%m%d %H:%M:%S') + ' GMT') if enddate else '',
bytes(begindate.strftime('%Y%m%d-%H:%M:%S')) if begindate else '',
bytes(enddate.strftime('%Y%m%d-%H:%M:%S')) if enddate else '',
100,
bytes(what),
int(useRTH),
True,
[])
return q
def reqHistoricalTicks(self, contract, enddate, begindate,
what=None, useRTH=False, tz=''):
'''Proxy to reqHistoricalTicks'''
# get a ticker/queue for identification/data delivery
tickerId, q = self.getTickerQueue()
if contract.secType in ['CASH', 'CFD']:
self.iscash[tickerId] = True
if not what:
what = 'BID' # TRADES doesn't work
elif what == 'ASK':
self.iscash[tickerId] = 2
else:
what = what or 'TRADES'
        if what == 'TRADES_ALL':
            what = 'LastAll'
        elif what == 'TRADES':
            what = 'Last'
        elif what == 'BID_ASK':
            what = 'BidAsk'
        elif what == 'MIDPOINT':
            what = 'MidPoint'
self.conn.reqHistoricalTicks(
tickerId,
contract,
# bytes(begindate.strftime('%Y%m%d %H:%M:%S') + ' GMT') if begindate else '',
bytes(begindate.strftime('%Y%m%d-%H:%M:%S')) if begindate else '',
# bytes(enddate.strftime('%Y%m%d %H:%M:%S') + ' GMT'),
# bytes(enddate.strftime('%Y%m%d %H:%M:%S') + ' GMT') if enddate else '',
bytes(enddate.strftime('%Y%m%d-%H:%M:%S')) if enddate else '',
10,
bytes(what),
# int(useRTH),
1,
True,
[])
return q
def cancelHistoricalData(self, q):
'''Cancels an existing HistoricalData request
Params:
- q: the Queue returned by reqMktData
'''
with self._lock_q:
self.conn.cancelHistoricalData(self.ts[q])
logger.warn(f"Cancel data queue for {q}")
self.cancelQueue(q, True)
@logibmsg
def reqRealTimeBars(self, contract, useRTH=False, duration=5, what=None):
'''Creates a request for (5 seconds) Real Time Bars
Params:
          - contract: a ib.ext.Contract.Contract instance
- useRTH: (default: False) passed to TWS
          - duration: (default: 5) passed to TWS (no other value worked as of 2016)
Returns:
- a Queue the client can wait on to receive a RTVolume instance
'''
# get a ticker/queue for identification/data delivery
tickerId, q = self.getTickerQueue()
what = what or 'TRADES'
# 20150929 - Only 5 secs supported for duration
self.conn.reqRealTimeBars(
tickerId,
contract,
duration,
# bytes('TRADES'),
bytes(what),
useRTH,
[])
return q
def cancelRealTimeBars(self, q):
'''Cancels an existing MarketData subscription
Params:
- q: the Queue returned by reqMktData
'''
with self._lock_q:
tickerId = self.ts.get(q, None)
if tickerId is not None:
self.conn.cancelRealTimeBars(tickerId)
logger.debug(f"Cancel data queue for {tickerId}")
self.cancelQueue(q, True)
    def reqMktData(self, contract, ticks='233', marketDataType=3, what=None):
'''Creates a MarketData subscription
Params:
          - contract: a ib.ext.Contract.Contract instance
- ticks: (default: '233') passed to TWS
- marketDataType: (default: 3) passed to TWS
Returns:
- a Queue the client can wait on to receive a RTVolume instance
'''
# get a ticker/queue for identification/data delivery
tickerId, q = self.getTickerQueue()
# request RTVOLUME tick delivered over tickString
if contract.secType in ['CASH', 'CFD']:
self.iscash[tickerId] = True
ticks = '' # cash markets do not get RTVOLUME
if what == 'ASK':
self.iscash[tickerId] = 2
# q.put(None) # to kickstart backfilling
# Can request 233 also for cash ... nothing will arrive
self.conn.reqMarketDataType(marketDataType)
self.conn.reqMktData(tickerId, contract, bytes(ticks), False, False, [])
return q
def reqTickByTickData(self, contract, what=None, ignoreSize=True):
'''
Tick-by-tick data corresponding to the data shown in the
TWS Time & Sales Window is available starting with TWS v969 and API v973.04.
'''
if what == 'TRADES':
what = 'Last'
elif what == 'TRADES_ALL':
what = 'AllLast'
elif what == 'BID_ASK':
what = 'BidAsk'
elif what == 'MIDPOINT':
what = 'MidPoint'
else:
what = 'Last'
tickerId, q = self.getTickerQueue()
self.conn.reqMarketDataType(3)
self.conn.reqTickByTickData(tickerId, contract, what, 0, ignoreSize)
return q
def cancelMktData(self, q):
'''Cancels an existing MarketData subscription
Params:
- q: the Queue returned by reqMktData
'''
with self._lock_q:
tickerId = self.ts.get(q, None)
if tickerId is not None:
self.conn.cancelMktData(tickerId)
logger.debug(f"Cancel data queue for {tickerId}")
self.cancelQueue(q, True)
def cancelTickByTickData(self, q):
'''Cancels an existing MarketData subscription
Params:
- q: the Queue returned by reqTickByTickData
'''
with self._lock_q:
tickerId = self.ts.get(q, None)
if tickerId is not None:
self.conn.cancelTickByTickData(tickerId)
logger.debug(f"Cancel data queue for {tickerId}")
self.cancelQueue(q, True)
def tickString(self, reqId, tickType, value):
# Receive and process a tickString message
tickerId = reqId
if tickType == 48: # RTVolume
try:
rtvol = RTVolume(value)
except ValueError: # price not in message ...
pass
else:
# Don't need to adjust the time, because it is in "timestamp"
# form in the message
self.qs[tickerId].put(rtvol)
def tickPrice(self, reqId, tickType, price, attrib):
'''Cash Markets have no notion of "last_price"/"last_size" and the
tracking of the price is done (industry de-facto standard at least with
the IB API) following the BID price
A RTVolume which will only contain a price is put into the client's
queue to have a consistent cross-market interface
'''
# Used for "CASH" markets
# The price field has been seen to be missing in some instances even if
# "field" is 1
tickerId = reqId
fieldcode = self.iscash[tickerId]
if fieldcode:
if tickType == fieldcode: # Expected cash field code
try:
if price == -1.0:
# seems to indicate the stream is halted for example in
# between 23:00 - 23:15 CET for FOREX
return
except AttributeError:
pass
try:
rtvol = RTVolume(price=price, tmoffset=self.tmoffset)
# print('rtvol with datetime:', rtvol.datetime)
except ValueError: # price not in message ...
pass
else:
self.qs[tickerId].put(rtvol)
else:
# Non-cash
try:
if price == -1.0:
# seems to indicate the stream is halted for example in
# between 23:00 - 23:15 CET for FOREX
return
except AttributeError:
pass
rtprice = RTPrice(price=price, tmoffset=self.tmoffset)
self.qs[tickerId].put(rtprice)
def tickSize(self, reqId, tickType, size):
tickerId = reqId
rtsize = RTSize(size=size, tmoffset=self.tmoffset)
self.qs[tickerId].put(rtsize)
def tickGeneric(self, reqId, tickType, value):
try:
if value == -1.0:
# seems to indicate the stream is halted for example in
# between 23:00 - 23:15 CET for FOREX
return
except AttributeError:
pass
tickerId = reqId
value = value # if msg.value != 0.0 else (1.0 + random.random())
rtprice = RTPrice(price=value, tmoffset=self.tmoffset)
self.qs[tickerId].put(rtprice)
def realtimeBar(self, msg):
'''Receives x seconds Real Time Bars (at the time of writing only 5
seconds are supported)
Not valid for cash markets
'''
# Get a naive localtime object
msg.time = datetime.utcfromtimestamp(float(msg.time))
self.qs[msg.reqId].put(msg)
def historicalData(self, msg):
'''Receives the events of a historical data request'''
# For multi-tiered downloads we'd need to rebind the queue to a new
# tickerId (in case tickerIds are not reusable) and instead of putting
        # None, issue a new reqHistData with the new data and move forward
tickerId = msg.reqId
q = self.qs[tickerId]
dtstr = msg.date # Format when string req: YYYYMMDD[ HH:MM:SS]
if self.histfmt[tickerId]:
sessionend = self.histsend[tickerId]
dt = datetime.strptime(dtstr, '%Y%m%d')
dteos = datetime.combine(dt, sessionend)
tz = self.histtz[tickerId]
if tz:
dteostz = tz.localize(dteos)
dteosutc = dteostz.astimezone(UTC).replace(tzinfo=None)
                # When requesting daily bars, for example, the current day is
                # returned with the data seen so far. If the session end were
                # kept, newer ticks wouldn't make it through, because they
                # happen before that end-of-session timestamp
else:
dteosutc = dteos
if dteosutc <= datetime.utcnow():
dt = dteosutc
msg.date = dt
else:
msg.date = datetime.utcfromtimestamp(long(dtstr))
q.put(msg)
def historicalDataEnd(self, reqId, start, end):
tickerId = reqId
self.histfmt.pop(tickerId, None)
self.histsend.pop(tickerId, None)
self.histtz.pop(tickerId, None)
kargs = self.histexreq.pop(tickerId, None)
if kargs is not None:
self.reqHistoricalDataEx(tickerId=tickerId, **kargs)
return
q = self.qs[tickerId]
self.cancelQueue(q)
def historicalTicks(self, reqId, tick):
tickerId = reqId
self.qs[tickerId].put(tick)
def historicalTicksEnd(self, reqId):
tickerId = reqId
q = self.qs[tickerId]
self.cancelTickByTickData(q)
def tickByTickBidAsk(self, reqId, time, bidPrice, askPrice, bidSize, askSize, tickAttribBidAsk):
tickerId = reqId
tick = RTTickBidAsk(time, bidPrice, askPrice, bidSize, askSize, tickAttribBidAsk)
self.qs[tickerId].put(tick)
    def tickByTickAllLast(self, reqId, tickType, time, price, size, tickAttribLast, exchange, specialConditions):
        tickerId = reqId
        tick = RTTickLast(tickType, time, price, size, tickAttribLast, exchange, specialConditions)
self.qs[tickerId].put(tick)
def tickByTickMidPoint(self, reqId, time, midPoint):
tickerId = reqId
        tick = RTTickMidPoint(time, midPoint)
self.qs[tickerId].put(tick)
    # The _durations are meant to calculate the needed historical data to
    # perform backfilling at the start of a connection or when a connection is
    # lost. Using the duration string as a key allows to quickly find out which
    # bar sizes (the values in the tuples in the dict) can be used.
_durations = dict([
# 60 seconds - 1 min
('60 S',
('1 secs', '5 secs', '10 secs', '15 secs', '30 secs',
'1 min')),
# 120 seconds - 2 mins
('120 S',
('1 secs', '5 secs', '10 secs', '15 secs', '30 secs',
'1 min', '2 mins')),
# 180 seconds - 3 mins
('180 S',
('1 secs', '5 secs', '10 secs', '15 secs', '30 secs',
'1 min', '2 mins', '3 mins')),
# 300 seconds - 5 mins
('300 S',
('1 secs', '5 secs', '10 secs', '15 secs', '30 secs',
'1 min', '2 mins', '3 mins', '5 mins')),
# 600 seconds - 10 mins
('600 S',
('1 secs', '5 secs', '10 secs', '15 secs', '30 secs',
'1 min', '2 mins', '3 mins', '5 mins', '10 mins')),
# 900 seconds - 15 mins
('900 S',
('1 secs', '5 secs', '10 secs', '15 secs', '30 secs',
'1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins')),
# 1200 seconds - 20 mins
('1200 S',
('1 secs', '5 secs', '10 secs', '15 secs', '30 secs',
'1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins',
'20 mins')),
# 1800 seconds - 30 mins
('1800 S',
('1 secs', '5 secs', '10 secs', '15 secs', '30 secs',
'1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins',
'20 mins', '30 mins')),
# 3600 seconds - 1 hour
('3600 S',
('5 secs', '10 secs', '15 secs', '30 secs',
'1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins',
'20 mins', '30 mins',
'1 hour')),
# 7200 seconds - 2 hours
('7200 S',
('5 secs', '10 secs', '15 secs', '30 secs',
'1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins',
'20 mins', '30 mins',
'1 hour', '2 hours')),
# 10800 seconds - 3 hours
('10800 S',
('10 secs', '15 secs', '30 secs',
'1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins',
'20 mins', '30 mins',
'1 hour', '2 hours', '3 hours')),
# 14400 seconds - 4 hours
('14400 S',
('15 secs', '30 secs',
'1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins',
'20 mins', '30 mins',
'1 hour', '2 hours', '3 hours', '4 hours')),
# 28800 seconds - 8 hours
('28800 S',
('30 secs',
'1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins',
'20 mins', '30 mins',
'1 hour', '2 hours', '3 hours', '4 hours', '8 hours')),
# 1 days
('1 D',
('1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins',
'20 mins', '30 mins',
'1 hour', '2 hours', '3 hours', '4 hours', '8 hours',
'1 day')),
# 2 days
('2 D',
('2 mins', '3 mins', '5 mins', '10 mins', '15 mins',
'20 mins', '30 mins',
'1 hour', '2 hours', '3 hours', '4 hours', '8 hours',
'1 day')),
# 1 weeks
('1 W',
('3 mins', '5 mins', '10 mins', '15 mins',
'20 mins', '30 mins',
'1 hour', '2 hours', '3 hours', '4 hours', '8 hours',
'1 day', '1 W')),
# 2 weeks
('2 W',
('15 mins', '20 mins', '30 mins',
'1 hour', '2 hours', '3 hours', '4 hours', '8 hours',
'1 day', '1 W')),
# 1 months
('1 M',
('30 mins',
'1 hour', '2 hours', '3 hours', '4 hours', '8 hours',
'1 day', '1 W', '1 M')),
# 2+ months
('2 M', ('1 day', '1 W', '1 M')),
('3 M', ('1 day', '1 W', '1 M')),
('4 M', ('1 day', '1 W', '1 M')),
('5 M', ('1 day', '1 W', '1 M')),
('6 M', ('1 day', '1 W', '1 M')),
('7 M', ('1 day', '1 W', '1 M')),
('8 M', ('1 day', '1 W', '1 M')),
('9 M', ('1 day', '1 W', '1 M')),
('10 M', ('1 day', '1 W', '1 M')),
('11 M', ('1 day', '1 W', '1 M')),
# 1+ years
('1 Y', ('1 day', '1 W', '1 M')),
])
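    # Hedged worked example: revdur (built in __init__) inverts this table, so
    # a '1 min' bar size maps to the candidate durations '60 S' .. '1 D';
    # after sorting, getmaxduration() picks the largest usable window.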
# Sizes allow for quick translation from bar sizes above to actual
# timeframes to make a comparison with the actual data
_sizes = {
'secs': (TimeFrame.Seconds, 1),
'min': (TimeFrame.Minutes, 1),
'mins': (TimeFrame.Minutes, 1),
'hour': (TimeFrame.Minutes, 60),
'hours': (TimeFrame.Minutes, 60),
'day': (TimeFrame.Days, 1),
'W': (TimeFrame.Weeks, 1),
'M': (TimeFrame.Months, 1),
}
_dur2tf = {
'S': TimeFrame.Seconds,
'D': TimeFrame.Days,
'W': TimeFrame.Weeks,
'M': TimeFrame.Months,
'Y': TimeFrame.Years,
}
def getdurations(self, timeframe, compression):
key = (timeframe, compression)
if key not in self.revdur:
return []
return self.revdur[key]
def getmaxduration(self, timeframe, compression):
key = (timeframe, compression)
try:
return self.revdur[key][-1]
except (KeyError, IndexError):
pass
return None
def tfcomp_to_size(self, timeframe, compression):
if timeframe == TimeFrame.Months:
return '{} M'.format(compression)
if timeframe == TimeFrame.Weeks:
return '{} W'.format(compression)
if timeframe == TimeFrame.Days:
if not compression % 7:
return '{} W'.format(compression // 7)
return '{} day'.format(compression)
if timeframe == TimeFrame.Minutes:
if not compression % 60:
hours = compression // 60
return ('{} hour'.format(hours)) + ('s' * (hours > 1))
return ('{} min'.format(compression)) + ('s' * (compression > 1))
if timeframe == TimeFrame.Seconds:
return '{} secs'.format(compression)
# Microseconds or ticks
return None
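# Illustrative mappings (derived from the branches above):
#   (TimeFrame.Minutes, 60) -> '1 hour'
#   (TimeFrame.Minutes, 5)  -> '5 mins'
#   (TimeFrame.Days, 7)     -> '1 W'
#   (TimeFrame.Seconds, 30) -> '30 secs'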
def dt_plus_duration(self, dt, duration):
size, dim = duration.split()
size = int(size)
if dim == 'S':
return dt + timedelta(seconds=size)
if dim == 'D':
return dt + timedelta(days=size)
if dim == 'W':
return dt + timedelta(days=size * 7)
if dim == 'M':
month = dt.month - 1 + size # -1 to make it 0-based, re-add below
years, month = divmod(month, 12)
return dt.replace(year=dt.year + years, month=month + 1)
if dim == 'Y':
return dt.replace(year=dt.year + size)
return dt # could do nothing with it ... return it intact
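# Worked example (dates hypothetical): with dt = datetime(2023, 11, 15) and
# duration '3 M', month = 11 - 1 + 3 = 13, divmod(13, 12) = (1, 1), giving
# datetime(2024, 2, 15). Note that ``replace`` raises ValueError when the
# day overflows the target month (e.g. Jan 31 plus '1 M').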
def calcdurations(self, dtbegin, dtend):
'''Calculate the duration between two datetimes'''
duration = self.histduration(dtbegin, dtend)
if duration[-1] == 'M':
m = int(duration.split()[0])
m1 = min(2, m) # (2, 1) -> 1, (2, 7) -> 2. Bottomline: 1 or 2
m2 = max(1, m1) # m1 can only be 1 or 2
checkdur = '{} M'.format(m2)
elif duration[-1] == 'Y':
checkdur = '1 Y'
else:
checkdur = duration
sizes = self._durations[checkdur]
return duration, sizes
def calcduration(self, dtbegin, dtend):
'''Calculate the duration between two datetimes. Returns a single size'''
duration, sizes = self.calcdurations(dtbegin, dtend)
return duration, sizes[0]
def histduration(self, dt1, dt2):
# Given two dates calculates the smallest possible duration according
# to the table from the Historical Data API limitations provided by IB
#
# Seconds: 'x S' (x: [60, 120, 180, 300, 600, 900, 1200, 1800, 3600,
# 7200, 10800, 14400, 28800])
# Days: 'x D' (x: [1, 2])
# Weeks: 'x W' (x: [1, 2])
# Months: 'x M' (x: [1, 11])
# Years: 'x Y' (x: [1])
td = dt2 - dt1 # get a timedelta for calculations
# First: array of secs
tsecs = td.total_seconds()
secs = [60, 120, 180, 300, 600, 900, 1200, 1800, 3600, 7200, 10800,
14400, 28800]
idxsec = bisect.bisect_left(secs, tsecs)
if idxsec < len(secs):
return '{} S'.format(secs[idxsec])
tdextra = bool(td.seconds or td.microseconds) # over days/weeks
# Next: 1 or 2 days
days = td.days + tdextra
if td.days <= 2:
return '{} D'.format(days)
# Next: 1 or 2 weeks
weeks, d = divmod(td.days, 7)
weeks += bool(d or tdextra)
if weeks <= 2:
return '{} W'.format(weeks)
# Get references to dt components
y2, m2, d2 = dt2.year, dt2.month, dt2.day
y1, m1, d1 = dt1.year, dt1.month, dt1.day
H2, M2, S2, US2 = dt2.hour, dt2.minute, dt2.second, dt2.microsecond
H1, M1, S1, US1 = dt1.hour, dt1.minute, dt1.second, dt1.microsecond
# Next: 1 -> 11 months (11 incl)
months = (y2 * 12 + m2) - (y1 * 12 + m1) + (
(d2, H2, M2, S2, US2) > (d1, H1, M1, S1, US1))
if months <= 1:
return '1 M'
elif months <= 11:
return '2 M' # capped at 2 months to keep the table clean
# Next: years
# y = y2 - y1 + (m2, d2, H2, M2, S2, US2) > (m1, d1, H1, M1, S1, US1)
# return '{} Y'.format(y)
return '1 Y' # to keep the table clean
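# Illustrative results (dates hypothetical): a 90-second span returns
# '120 S' (bisect to the next rung in ``secs``); exactly 3 days returns
# '1 W' (too long for the 1-2 day rungs); 10 days returns '2 W'; and a
# month and a half returns '2 M' via the capping above.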
def makecontract(self, symbol, sectype, exch, curr,
expiry='', strike=0.0, right='', mult=1,
primaryExch=None, localSymbol=None):
'''Returns a contract built from the parameters, without any check'''
contract = Contract()
if localSymbol:
contract.localSymbol = bytes(localSymbol)
else:
contract.symbol = bytes(symbol)
contract.secType = bytes(sectype)
contract.exchange = bytes(exch)
if primaryExch:
contract.primaryExchange = bytes(primaryExch)
if curr:
contract.currency = bytes(curr)
if sectype in ['FUT', 'OPT', 'FOP']:
contract.lastTradeDateOrContractMonth = bytes(expiry)
if sectype in ['OPT', 'FOP']:
contract.strike = strike
contract.right = bytes(right)
if mult:
contract.multiplier = bytes(mult)
return contract
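# Example (values illustrative): makecontract('ES', 'FUT', 'GLOBEX', 'USD',
# expiry='202312', mult=50) fills symbol/secType/exchange/currency, sets
# lastTradeDateOrContractMonth and multiplier, and skips strike/right
# because the security type is not an option.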
def cancelOrder(self, orderid):
'''Proxy to cancelOrder'''
self.conn.cancelOrder(orderid)
def placeOrder(self, orderid, contract, order):
'''Proxy to placeOrder'''
self.conn.placeOrder(orderid, contract, order)
def openOrder(self, msg):
'''Receive ``openOrder`` events'''
self.broker.push_orderstate(msg)
def openOrderEnd(self):
# TODO: Add event to manage order requests
logger.debug(f"openOrderEnd")
def execDetails(self, reqId, contract, execution):
'''Receive execDetails'''
execution.shares = float(execution.shares)
execution.cumQty = float(execution.cumQty)
self.broker.push_execution(execution)
def orderStatus(self, msg):
'''Receive the event ``orderStatus``'''
self.broker.push_orderstatus(msg)
def commissionReport(self, commissionReport):
'''Receive the event commissionReport'''
self.broker.push_commissionreport(commissionReport)
def reqPositions(self):
'''Proxy to reqPositions'''
self.conn.reqPositions()
def position(self, account, contract, pos, avgCost):
'''Receive position events'''
# Lock access to the position dicts. This is called in sub-thread and
# can kick in at any time
with self._lock_pos:
try:
if not self._event_accdownload.is_set(): # 1st event seen
position = Position(float(pos), float(avgCost))
logger.debug(f"POSITIONS INITIAL: {self.positions}")
self.positions[contract.conId] = position
else:
position = self.positions[contract.conId]
logger.debug(f"POSITION UPDATE: {position}")
if not position.fix(float(pos), avgCost):
err = ('The current calculated position and '
'the position reported by the broker do not match. '
'Operation can continue, but the trades '
'calculated in the strategy may be wrong')
self.notifs.put((err, (), {}))
# self.broker.push_portupdate()
except Exception as e:
logger.exception(f"Exception: {e}")
def positionEnd(self):
logger.debug(f"positionEnd")
def reqAccountUpdates(self, subscribe=True, account=None):
'''Proxy to reqAccountUpdates
If ``account`` is ``None``, wait for the ``managedAccounts`` message to
set the account codes
'''
if account is None:
self._event_managed_accounts.wait()
account = self.managed_accounts[0]
self.conn.reqAccountUpdates(subscribe, bytes(account))
def accountDownloadEnd(self, accountName):
# Signals the end of an account update download. The event is set here;
# it is only unset before the first download, so it can also be used to
# find out whether the data has been downloaded at least once
self._event_accdownload.set()
if False:
if self.port_update:
self.broker.push_portupdate()
self.port_update = False
def updatePortfolio(self, contract, pos,
marketPrice, marketValue,
averageCost, unrealizedPNL,
realizedPNL, accountName):
# Lock access to the position dicts. This is called in sub-thread and
# can kick in at any time
with self._lock_pos:
try:
if not self._event_accdownload.is_set(): # 1st event seen
position = Position(float(pos), float(averageCost))
logger.debug(f"POSITIONS INITIAL: {self.positions}")
# self.positions[contract.conId] = position
self.positions.setdefault(contract.conId, position)
else:
position = self.positions[contract.conId]
logger.debug(f"POSITION UPDATE: {position}")
if not position.fix(float(pos), averageCost):
err = ('The current calculated position and '
'the position reported by the broker do not match. '
'Operation can continue, but the trades '
'calculated in the strategy may be wrong')
self.notifs.put((err, (), {}))
# Flag signal to broker at the end of account download
# self.port_update = True
self.broker.push_portupdate()
except Exception as e:
logger.exception(f"Exception: {e}")
def getposition(self, contract, clone=False):
# Lock access to the position dicts. This is called from main thread
# and updates could be happening in the background
with self._lock_pos:
position = self.positions[contract.conId]
if clone:
return copy(position)
return position
@logibmsg
def updateAccountValue(self, key, value, currency, accountName):
# Lock access to the dicts where values are updated. This happens in a
# sub-thread and could kick in at any time
with self._lock_accupd:
try:
value = float(value)
except ValueError:
pass # keep the original string value
self.acc_upds[accountName][key][currency] = value
if key == 'NetLiquidation':
# NetLiquidationByCurrency and currency == 'BASE' is the same
self.acc_value[accountName] = value
elif key == 'CashBalance' and currency == 'BASE':
self.acc_cash[accountName] = value
@logibmsg
def get_acc_values(self, account=None):
'''Returns all account value info sent by TWS during regular updates
Waits for at least 1 successful download
If ``account`` is ``None`` then a dictionary with accounts as keys will
be returned containing all accounts
If account is specified or the system has only 1 account the dictionary
corresponding to that account is returned
'''
# Wait for at least 1 account update download to have been finished
# before the account infos can be returned to the calling client
# if self.connected():
# self._event_accdownload.wait()
# Lock access to acc_cash to avoid an event interfering
with self._updacclock:
if account is None:
# wait for the managedAccount Messages
# if self.connected():
# self._event_managed_accounts.wait()
if not self.managed_accounts:
return self.acc_upds.copy()
elif len(self.managed_accounts) > 1:
return self.acc_upds.copy()
# Only 1 account, fall through to return only 1
account = self.managed_accounts[0]
try:
return self.acc_upds[account].copy()
except KeyError:
pass
return self.acc_upds.copy()
@logibmsg
def get_acc_value(self, account=None):
'''Returns the net liquidation value sent by TWS during regular updates
Waits for at least 1 successful download
If ``account`` is ``None`` then a dictionary with accounts as keys will
be returned containing all accounts
If account is specified or the system has only 1 account the dictionary
corresponding to that account is returned
'''
# Wait for at least 1 account update download to have been finished
# before the value can be returned to the calling client
# Lock access to acc_cash to avoid an event interfering
with self._updacclock:
if account is None:
if not self.managed_accounts:
return float()
elif len(self.managed_accounts) > 1:
return sum(self.acc_value.values())
# Only 1 account, fall through to return only 1
account = self.managed_accounts[0]
try:
return self.acc_value[account]
except KeyError:
pass
return float()
@logibmsg
def get_acc_cash(self, account=None):
'''Returns the total cash value sent by TWS during regular updates
Waits for at least 1 successful download
If ``account`` is ``None`` then a dictionary with accounts as keys will
be returned containing all accounts
If account is specified or the system has only 1 account the dictionary
corresponding to that account is returned
'''
# Wait for at least 1 account update download to have been finished
# before the cash can be returned to the calling client
# if self.connected():
# self._event_accdownload.wait()
# Lock access to acc_cash to avoid an event interfering
with self._lock_accupd:
if account is None:
# # wait for the managedAccount Messages
# if self.connected():
# self._event_managed_accounts.wait()
if not self.managed_accounts:
return float()
elif len(self.managed_accounts) > 1:
return sum(self.acc_cash.values())
# Only 1 account, fall through to return only 1
account = self.managed_accounts[0]
try:
return self.acc_cash[account]
except KeyError:
pass
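return float() # assumed completion, mirroring the fall-through in get_acc_value above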
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/broker/ibkr/ibstore.py
|
ibstore.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import datetime
import time
import backtrader as bt
from backtrader.feed import DataBase
from backtrader import TimeFrame, date2num, num2date
from backtrader.utils.py3 import (integer_types, queue, string_types,
with_metaclass)
from backtrader.metabase import MetaParams
import ibstore
import logging
logger = logging.getLogger(__name__)
class MetaIBData(DataBase.__class__):
def __init__(cls, name, bases, dct):
'''Class has already been created ... register'''
# Initialize the class
super(MetaIBData, cls).__init__(name, bases, dct)
# Register with the store
ibstore.IBStore.DataCls = cls
class IBData(with_metaclass(MetaIBData, DataBase)):
'''Interactive Brokers Data Feed.
Supports the following contract specifications in parameter ``dataname``:
- TICKER # Stock type and SMART exchange
- TICKER-STK # Stock and SMART exchange
- TICKER-STK-EXCHANGE # Stock
- TICKER-STK-EXCHANGE-CURRENCY # Stock
- TICKER-CFD # CFD and SMART exchange
- TICKER-CFD-EXCHANGE # CFD
- TICKER-CFD-EXCHANGE-CURRENCY # CFD
- TICKER-IND-EXCHANGE # Index
- TICKER-IND-EXCHANGE-CURRENCY # Index
- TICKER-YYYYMM-EXCHANGE # Future
- TICKER-YYYYMM-EXCHANGE-CURRENCY # Future
- TICKER-YYYYMM-EXCHANGE-CURRENCY-MULT # Future
- TICKER-FUT-EXCHANGE-CURRENCY-YYYYMM-MULT # Future
- TICKER-YYYYMM-EXCHANGE-CURRENCY-STRIKE-RIGHT # FOP
- TICKER-YYYYMM-EXCHANGE-CURRENCY-STRIKE-RIGHT-MULT # FOP
- TICKER-FOP-EXCHANGE-CURRENCY-YYYYMM-STRIKE-RIGHT # FOP
- TICKER-FOP-EXCHANGE-CURRENCY-YYYYMM-STRIKE-RIGHT-MULT # FOP
- CUR1.CUR2-CASH-IDEALPRO # Forex
- TICKER-YYYYMMDD-EXCHANGE-CURRENCY-STRIKE-RIGHT # OPT
- TICKER-YYYYMMDD-EXCHANGE-CURRENCY-STRIKE-RIGHT-MULT # OPT
- TICKER-OPT-EXCHANGE-CURRENCY-YYYYMMDD-STRIKE-RIGHT # OPT
- TICKER-OPT-EXCHANGE-CURRENCY-YYYYMMDD-STRIKE-RIGHT-MULT # OPT
Params:
- ``sectype`` (default: ``STK``)
Default value to apply as *security type* if not provided in the
``dataname`` specification
- ``exchange`` (default: ``SMART``)
Default value to apply as *exchange* if not provided in the
``dataname`` specification
- ``primaryExchange`` (default: ``None``)
For certain smart-routed stock contracts that have the same symbol,
currency and exchange, you would also need to specify the primary
exchange attribute to uniquely define the contract. This should be
defined as the native exchange of a contract
- ``right`` (default: ``None``)
Warrants, like options, require an expiration date, a right,
a strike and an optional multiplier.
- ``strike`` (default: ``None``)
Warrants, like options, require an expiration date, a right,
a strike and an optional multiplier.
- ``expiry`` (default: ``None``)
Warrants, like options, require an expiration date, a right,
a strike and an optional multiplier.
In this case expiry is 'lastTradeDateOrContractMonth'
- ``currency`` (default: ``''``)
Default value to apply as *currency* if not provided in the
``dataname`` specification
- ``multiplier`` (default: ``None``)
Occasionally, you can expect to have more than a single future
contract for the same underlying with the same expiry. To rule
out the ambiguity, the contract's multiplier can be given
- ``tradingClass`` (default: ``None``)
It is not unusual to find many option contracts with an almost identical
description (i.e. underlying symbol, strike, last trading date,
multiplier, etc.). Adding more details such as the trading class will help
- ``localSymbol`` (default: ``None``)
Warrants, like options, require an expiration date, a right, a strike and
a multiplier. For some warrants it will be necessary to define a
localSymbol or conId to uniquely identify the contract
- ``historical`` (default: ``False``)
If set to ``True`` the data feed will stop after doing the first
download of data.
The standard data feed parameters ``fromdate`` and ``todate`` will be
used as reference.
The data feed will make multiple requests if the requested duration is
larger than the one allowed by IB given the timeframe/compression
chosen for the data.
- ``what`` (default: ``None``)
If ``None`` the default for different assets types will be used for
historical data requests:
- 'BID' for CASH assets
- 'TRADES' for any other
Use 'ASK' for the Ask quote of cash assets
Check the IB API docs if another value is wished
(TRADES,MIDPOINT,BID,ASK,BID_ASK,ADJUSTED_LAST,HISTORICAL_VOLATILITY,
OPTION_IMPLIED_VOLATILITY, REBATE_RATE, FEE_RATE,
YIELD_BID, YIELD_ASK, YIELD_BID_ASK, YIELD_LAST)
- ``rtbar`` (default: ``False``)
If ``True`` the ``5 Seconds Realtime bars`` provided by Interactive
Brokers will be used as the smallest tick. According to the
documentation they correspond to real-time values (once collated and
curated by IB)
If ``False`` then the ``RTVolume`` prices will be used, which are based
on receiving ticks. In the case of ``CASH`` assets (like for example
EUR.JPY) ``RTVolume`` will always be used and from it the ``bid`` price
(industry de-facto standard with IB according to the literature
scattered over the Internet)
Even if set to ``True``, if the data is resampled/kept to a
timeframe/compression below Seconds/5, no real time bars will be used,
because IB doesn't serve them below that level
- ``qcheck`` (default: ``0.5``)
Time in seconds to wake up if no data is received to give a chance to
resample/replay packets properly and pass notifications up the chain
- ``backfill_start`` (default: ``True``)
Perform backfilling at the start. The maximum possible historical data
will be fetched in a single request.
- ``backfill`` (default: ``True``)
Perform backfilling after a disconnection/reconnection cycle. The gap
duration will be used to download the smallest possible amount of data
- ``backfill_from`` (default: ``None``)
An additional data source can be passed to do an initial layer of
backfilling. Once the data source is depleted and if requested,
backfilling from IB will take place. This is ideally meant to backfill
from already stored sources like a file on disk, but not limited to.
- ``latethrough`` (default: ``False``)
If the data source is resampled/replayed, some ticks may come in too
late for the already delivered resampled/replayed bar. If this is
``True`` those ticks will be let through in any case.
Check the Resampler documentation to see how to take those ticks into
account.
This can happen especially if ``timeoffset`` is set to ``False`` in
the ``IBStore`` instance and the TWS server time is not in sync with
that of the local computer
- ``tradename`` (default: ``None``)
Useful for some specific cases like ``CFD`` in which prices are offered
by one asset and trading happens in a different one
- SPY-STK-SMART-USD -> SP500 ETF (will be specified as ``dataname``)
- SPY-CFD-SMART-USD -> the corresponding CFD, which does not offer
price tracking but will in this case be the trading asset (specified
as ``tradename``)
The default values in the params are there to allow things like ``TICKER``,
to which the parameter ``sectype`` (default: ``STK``) and ``exchange``
(default: ``SMART``) are applied.
Some assets like ``AAPL`` need full specification including ``currency``
(default: '') whereas others like ``TWTR`` can be simply passed as it is.
- ``AAPL-STK-SMART-USD`` would be the full specification for dataname
Or else: ``IBData`` as ``IBData(dataname='AAPL', currency='USD')``
which uses the default values (``STK`` and ``SMART``) and overrides
the currency to be ``USD``
'''
params = (
('secType', 'STK'), # usual industry value
('exchange', 'SMART'), # usual industry value
('primaryExchange', None), # native exchange of the contract
('right', None), # Option or Warrant Call('C') or Put('P')
('strike', None), # Future, Option or Warrant strike price
('multiplier', None), # Future, Option or Warrant multiplier
('expiry', None), # Future, Option or Warrant lastTradeDateOrContractMonth date
('currency', ''), # currency for the contract
('localSymbol', None), # Warrant localSymbol override
('rtbar', False), # use RealTime 5 seconds bars
('historical', False), # only historical download
('what', None), # historical - what to show
('useRTH', False), # historical - download only Regular Trading Hours
('qcheck', 0.5), # timeout in seconds (float) to check for events
('backfill_start', True), # do backfilling at the start
('backfill', True), # do backfilling when reconnecting
('backfill_from', None), # additional data source to do backfill from
('latethrough', False), # let late samples through
('tradename', None), # use a different asset as order target
('numberOfTicks', 1000), # Number of distinct data points. Max is 1000 per request.
('ignoreSize', False),
# Omit updates that reflect only changes in size, and not price. Applicable to Bid_Ask data requests.
)
_store = ibstore.IBStore
# Minimum size supported by real-time bars
RTBAR_MINSIZE = (TimeFrame.Seconds, 5)
# States for the Finite State Machine in _load
_ST_FROM, _ST_START, _ST_LIVE, _ST_HISTORBACK, _ST_OVER = range(5)
def _timeoffset(self):
return self.ib.timeoffset()
def _gettz(self):
# If no object has been provided by the user and a timezone can be
# found via contractdetails, then try to get it from pytz, which may or
# may not be available.
# The timezone specifications returned by TWS seem to be abbreviations
# understood by pytz, but the full list which TWS may return is not
# documented and one of the abbreviations may fail
tzstr = isinstance(self.p.tz, string_types)
if self.p.tz is not None and not tzstr:
return bt.utils.date.Localizer(self.p.tz)
if self.contractdetails is None:
return None # nothing can be done
try:
import pytz # keep the import very local
except ImportError:
return None # nothing can be done
tzs = self.p.tz if tzstr else self.contractdetails.timeZoneId
if tzs == 'CST': # reported by TWS, not compatible with pytz. patch it
tzs = 'CST6CDT'
try:
tz = pytz.timezone(tzs)
except pytz.UnknownTimeZoneError:
return None # nothing can be done
# contractdetails there, import ok, timezone found, return it
return tz
def islive(self):
'''Returns ``True`` to notify ``Cerebro`` that preloading and runonce
should be deactivated'''
return not self.p.historical
def __init__(self, **kwargs):
self.ib = self._store(**kwargs)
self.precontract = self.parsecontract(self.p.dataname)
self.pretradecontract = self.parsecontract(self.p.tradename)
def setenvironment(self, env):
'''Receives an environment (cerebro) and passes it over to the store it
belongs to'''
super(IBData, self).setenvironment(env)
env.addstore(self.ib)
def parsecontract(self, dataname):
'''Parses the dataname and generates a default contract'''
# Set defaults for optional tokens in the ticker string
if dataname is None:
return None
exch = self.p.exchange
primaryExch = self.p.primaryExchange
curr = self.p.currency
expiry = self.p.expiry
strike = self.p.strike
right = self.p.right
mult = self.p.multiplier
localSymbol = self.p.localSymbol
# split the ticker string
tokens = iter(dataname.split('-'))
# Symbol and security type are compulsory
symbol = next(tokens)
try:
sectype = next(tokens)
except StopIteration:
sectype = self.p.secType
# security type can be an expiration date
if sectype.isdigit():
expiry = sectype # save the expiration date
if len(sectype) == 6: # YYYYMM
sectype = 'FUT'
else: # Assume OPTIONS - YYYYMMDD
sectype = 'OPT'
if sectype == 'CASH': # need to address currency for Forex
symbol, curr = symbol.split('.')
# See if the optional tokens were provided
try:
exch = next(tokens) # on exception it will be the default
curr = next(tokens) # on exception it will be the default
if sectype == 'FUT':
if not expiry:
expiry = next(tokens)
mult = next(tokens)
# Try to see if this is FOP - Futures on OPTIONS
right = next(tokens)
# if still here this is a FOP and not a FUT
sectype = 'FOP'
strike, mult = float(mult), '' # assign to strike and void
mult = next(tokens) # try again to see if there is any
elif sectype == 'OPT':
if not expiry:
expiry = next(tokens)
strike = float(next(tokens)) # on exception - default
right = next(tokens) # on exception it will be the default
mult = next(tokens) # ?? no harm in any case
except StopIteration:
pass
# Make the initial contract
precon = self.ib.makecontract(
symbol=symbol, sectype=sectype, exch=exch, curr=curr,
expiry=expiry, strike=strike, right=right, mult=mult,
primaryExch=primaryExch, localSymbol=localSymbol)
return precon
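# Example walk-through (dataname illustrative): 'EUR.JPY-CASH-IDEALPRO'
# yields symbol='EUR', curr='JPY' (split on '.'), sectype='CASH' and
# exch='IDEALPRO'; a bare 'AAPL' falls back to the secType/exchange
# parameter defaults ('STK' and 'SMART').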
def start(self):
'''Starts the IB connection and gets the real contract and
contractdetails if it exists'''
super(IBData, self).start()
# Kickstart store and get queue to wait on
self.qlive = self.ib.start(data=self)
self.qhist = None
self._usertvol = not self.p.rtbar
tfcomp = (self._timeframe, self._compression)
if tfcomp < self.RTBAR_MINSIZE:
# Requested timeframe/compression not supported by rtbars
self._usertvol = True
self.contract = None
self.contractdetails = None
self.tradecontract = None
self.tradecontractdetails = None
if self.p.backfill_from is not None:
self._state = self._ST_FROM
self.p.backfill_from.setenvironment(self._env)
self.p.backfill_from._start()
else:
self._state = self._ST_START # initial state for _load
self._statelivereconn = False # if reconnecting in live state
self._subcription_valid = False # subscription state
self._storedmsg = dict() # keep pending live message (under None)
if not self.ib.connected():
return
self.put_notification(self.CONNECTED)
# get real contract details with real conId (contractId)
cds = self.ib.getContractDetails(self.precontract, maxcount=1)
if cds is not None:
cdetails = cds[0]
self.contract = cdetails.contract
self.contractdetails = cdetails
else:
# no contract can be found (or many)
self.put_notification(self.DISCONNECTED)
return
if self.pretradecontract is None:
# no different trading asset - default to standard asset
self.tradecontract = self.contract
self.tradecontractdetails = self.contractdetails
else:
# different target asset (typical of some CFD products)
# use other set of details
cds = self.ib.getContractDetails(self.pretradecontract, maxcount=1)
if cds is not None:
cdetails = cds[0]
self.tradecontract = cdetails.contract
self.tradecontractdetails = cdetails
else:
# no contract can be found (or many)
self.put_notification(self.DISCONNECTED)
return
if self._state == self._ST_START:
self._start_finish() # to finish initialization
self._st_start()
def stop(self):
'''Stops and tells the store to stop'''
super(IBData, self).stop()
self.ib.stop()
def reqdata(self):
'''Request real-time data. Checks cash vs non-cash assets and the ``rtbar`` param'''
if self.contract is None or self._subcription_valid:
return
if self._usertvol and self._timeframe != bt.TimeFrame.Ticks:
self.qlive = self.ib.reqMktData(self.contract, self.p.what)
elif self._usertvol and self._timeframe == bt.TimeFrame.Ticks:
self.qlive = self.ib.reqTickByTickData(self.contract, self.p.what)
else:
self.qlive = self.ib.reqRealTimeBars(self.contract, what=self.p.what)
self._subcription_valid = True
return self.qlive
def canceldata(self):
'''Cancels Market Data subscription, checking asset type and rtbar'''
if self.contract is None:
return
if self._usertvol and self._timeframe != bt.TimeFrame.Ticks:
self.ib.cancelMktData(self.qlive)
elif self._usertvol and self._timeframe == bt.TimeFrame.Ticks:
self.ib.cancelTickByTickData(self.qlive)
else:
self.ib.cancelRealTimeBars(self.qlive)
def haslivedata(self):
return bool(self._storedmsg or self.qlive)
def _load(self):
if self.contract is None or self._state == self._ST_OVER:
return False # nothing can be done
while True:
if self._state == self._ST_LIVE:
try:
msg = (self._storedmsg.pop(None, None) or
self.qlive.get(timeout=self._qcheck))
except queue.Empty:
return None
# Code invalidated until further checking is done
# if not self._statelivereconn:
# return None # indicate timeout situation
# # Awaiting data and nothing came in - fake it up until now
# dtend = self.num2date(date2num(datetime.datetime.utcnow()))
# dtbegin = None
# if len(self) > 1:
# dtbegin = self.num2date(self.datetime[-1])
# self.qhist = self.ib.reqHistoricalDataEx(
# contract=self.contract,
# enddate=dtend, begindate=dtbegin,
# timeframe=self._timeframe,
# compression=self._compression,
# what=self.p.what, useRTH=self.p.useRTH, tz=self._tz,
# sessionend=self.p.sessionend)
# if self._laststatus != self.DELAYED:
# self.put_notification(self.DELAYED)
# self._state = self._ST_HISTORBACK
# self._statelivereconn = False
# continue # to reenter the loop and hit st_historback
if msg is None: # Conn broken during historical/backfilling
self._subcription_valid = False
self.put_notification(self.CONNBROKEN)
# Try to reconnect
if not self.ib.reconnect(resub=True):
self.put_notification(self.DISCONNECTED)
return False # failed
self._statelivereconn = self.p.backfill
continue
if msg == -504: # Conn broken during live
self._subcription_valid = False
self.put_notification(self.CONNBROKEN)
# Try to reconnect
if not self.ib.reconnect(resub=True):
self.put_notification(self.DISCONNECTED)
return False # failed
# self._statelivereconn = self.p.backfill
continue
if msg == -354:
self.put_notification(self.NOTSUBSCRIBED)
return False
elif msg == -1100: # conn broken
# Tell to wait for a message to do a backfill
# self._state = self._ST_DISCONN
self._subcription_valid = False
self._statelivereconn = self.p.backfill
continue
elif msg == -1102: # conn broken/restored tickerId maintained
# The message may be duplicated
if not self._statelivereconn:
self._statelivereconn = self.p.backfill
continue
elif msg == -1101: # conn broken/restored tickerId gone
# The message may be duplicated
self._subcription_valid = False
if not self._statelivereconn:
self._statelivereconn = self.p.backfill
self.reqdata() # resubscribe
continue
elif msg == -10225: # Bust event occurred, current subscription is deactivated.
self._subcription_valid = False
if not self._statelivereconn:
self._statelivereconn = self.p.backfill
self.reqdata() # resubscribe
continue
elif isinstance(msg, integer_types):
# Unexpected notification for historical data; skip it
# May be a "not connected not yet processed"
self.put_notification(self.UNKNOWN, msg)
continue
# Process the message according to expected return type
if not self._statelivereconn:
if self._laststatus != self.LIVE:
if self.qlive.qsize() <= 1: # very short live queue
self.put_notification(self.LIVE)
if self._usertvol and self._timeframe != bt.TimeFrame.Ticks:
ret = self._load_rtvolume(msg)
elif self._usertvol and self._timeframe == bt.TimeFrame.Ticks:
ret = self._load_rtticks(msg)
else:
ret = self._load_rtbar(msg)
if ret:
return True
# could not load bar ... go and get new one
continue
# Fall through to processing reconnect - try to backfill
self._storedmsg[None] = msg # keep the msg
# else do a backfill
if self._laststatus != self.DELAYED:
self.put_notification(self.DELAYED)
dtend = None
if len(self) > 1:
# len == 1 ... forwarded for the 1st time
# get begin date in utc-like format like msg.datetime
dtbegin = num2date(self.datetime[-1])
elif self.fromdate > float('-inf'):
dtbegin = num2date(self.fromdate)
else: # 1st bar and no begin set
# passing None to fetch max possible in 1 request
dtbegin = None
dtend = msg.datetime if self._usertvol else msg.time
if self._timeframe != bt.TimeFrame.Ticks:
self.qhist = self.ib.reqHistoricalDataEx(
contract=self.contract, enddate=dtend, begindate=dtbegin,
timeframe=self._timeframe, compression=self._compression,
what=self.p.what, useRTH=self.p.useRTH, tz=self._tz,
sessionend=self.p.sessionend)
else:
# dtend = num2date(dtend)
self.qhist = self.ib.reqHistoricalTicksEx(
contract=self.contract, enddate=dtend,
what=self.p.what, useRTH=self.p.useRTH, tz=self._tz,
)
self._state = self._ST_HISTORBACK
self._statelivereconn = False # no longer in live
continue
elif self._state == self._ST_HISTORBACK:
try:
msg = self.qhist.get(timeout=self.p.qcheck)
except queue.Empty:
if self.p.historical: # only historical
self.put_notification(self.DISCONNECTED)
return False # end of historical
# Live is also wished - go for it
self._state = self._ST_LIVE
continue
if msg is None: # Conn broken during historical/backfilling
# Situation not managed. Simply bail out
self._subcription_valid = False
self.put_notification(self.DISCONNECTED)
return False # error management cancelled the queue
elif msg == -354: # Data not subscribed
self._subcription_valid = False
self.put_notification(self.NOTSUBSCRIBED)
return False
elif msg == -420: # No permissions for the data
self._subcription_valid = False
self.put_notification(self.NOTSUBSCRIBED)
return False
elif isinstance(msg, integer_types):
# Unexpected notification for historical data; skip it
# May be a "not connected not yet processed"
self.put_notification(self.UNKNOWN, msg)
continue
if msg.date is not None:
if self._timeframe == bt.TimeFrame.Ticks:
if self._load_rtticks(msg, hist=True):
return True
else:
if self._load_rtbar(msg, hist=True):
return True # loading worked
# the date is from overlapping historical request
continue
# End of histdata
if self.p.historical: # only historical
self.put_notification(self.DISCONNECTED)
return False # end of historical
# Live is also required - go for it
self._state = self._ST_LIVE
continue
elif self._state == self._ST_FROM:
if not self.p.backfill_from.next():
# additional data source is consumed
self._state = self._ST_START
continue
# copy lines of the same name
for alias in self.lines.getlinealiases():
lsrc = getattr(self.p.backfill_from.lines, alias)
ldst = getattr(self.lines, alias)
ldst[0] = lsrc[0]
return True
elif self._state == self._ST_START:
if not self._st_start():
return False
def _st_start(self):
if self.p.historical:
self.put_notification(self.DELAYED)
dtend = None
if self.todate < float('inf'):
dtend = num2date(self.todate)
dtbegin = None
if self.fromdate > float('-inf'):
dtbegin = num2date(self.fromdate)
if self._timeframe == bt.TimeFrame.Ticks:
self.qhist = self.ib.reqHistoricalTicksEx(
contract=self.contract, enddate=dtend, begindate=dtbegin,
what=self.p.what, useRTH=self.p.useRTH, tz=self._tz)
else:
self.qhist = self.ib.reqHistoricalDataEx(
contract=self.contract, enddate=dtend, begindate=dtbegin,
timeframe=self._timeframe, compression=self._compression,
what=self.p.what, useRTH=self.p.useRTH, tz=self._tz,
sessionend=self.p.sessionend)
self._state = self._ST_HISTORBACK
return True # continue before
# Live is requested
if not self.ib.reconnect(resub=True):
self.put_notification(self.DISCONNECTED)
self._state = self._ST_OVER
return False # failed
self._statelivereconn = self.p.backfill_start
if self.p.backfill_start:
self.put_notification(self.DELAYED)
self._state = self._ST_LIVE
return True # no return before - implicit continue
def _load_rtbar(self, rtbar, hist=False):
# A complete 5 second bar made of real-time ticks is delivered and
# contains open/high/low/close/volume prices
# The historical data has the same data but with 'date' instead of
# 'time' for datetime
dt = date2num(rtbar.time if not hist else rtbar.date)
if dt < self.lines.datetime[-1] and not self.p.latethrough:
return False # cannot deliver earlier than already delivered
self.lines.datetime[0] = dt
# Put the tick into the bar
self.lines.open[0] = rtbar.open
self.lines.high[0] = rtbar.high
self.lines.low[0] = rtbar.low
self.lines.close[0] = rtbar.close
self.lines.volume[0] = rtbar.volume
self.lines.openinterest[0] = 0
return True
def _load_rtvolume(self, rtvol):
# A single tick is delivered and is therefore used for the entire set
# of prices (open/high/low/close); volume is taken from the tick size
# Datetime transformation
dt = date2num(rtvol.datetime)
if dt < self.lines.datetime[-1] and not self.p.latethrough:
return False # cannot deliver earlier than already delivered
self.lines.datetime[0] = dt
# Put the tick into the bar
tick = rtvol.price if rtvol.price else self.lines.close[-1]
self.lines.open[0] = tick
self.lines.high[0] = tick
self.lines.low[0] = tick
self.lines.close[0] = tick
self.lines.volume[0] = rtvol.size if rtvol.size else self.lines.volume[-1]
self.lines.openinterest[0] = 0
return True
def _load_rtticks(self, tick, hist=False):
dt = date2num(tick.datetime if not hist else tick.date)
if dt < self.lines.datetime[-1] and not self.p.latethrough:
return False # cannot deliver earlier than already delivered
self.lines.datetime[0] = dt
if tick.dataType == 'RT_TICK_MIDPOINT':
self.lines.close[0] = tick.midPoint
elif tick.dataType == 'RT_TICK_BID_ASK':
self.lines.open[0] = tick.bidPrice
self.lines.close[0] = tick.askPrice
self.lines.volume[0] = tick.bidSize
self.lines.openinterest[0] = tick.askSize
elif tick.dataType == 'RT_TICK_LAST':
self.lines.close[0] = tick.price
self.lines.volume[0] = tick.size
return True
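# --- Hypothetical usage sketch (not part of the original source) ---
# Wiring the feed into backtrader for a historical-only download, per the
# dataname grammar in the class docstring; symbol and dates are illustrative.
#
# import datetime
# import backtrader as bt
# cerebro = bt.Cerebro()
# data = IBData(dataname='AAPL-STK-SMART-USD', historical=True,
#               fromdate=datetime.datetime(2023, 1, 1))
# cerebro.adddata(data)
# cerebro.run()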
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/broker/ibkr/ibdata.py
|
ibdata.py
|
from collections import defaultdict
import asyncio
import logging
from typing import Optional, ByteString
from aiokafka import AIOKafkaProducer
from aiokafka.errors import RequestTimedOutError, KafkaConnectionError, NodeNotReadyError
from yapic import json
# NOTE: import path assumed from this wheel's layout (backend/backend.py)
from backend.backend import BackendBookCallback, BackendCallback, BackendQueue
LOG = logging.getLogger('feedhandler')
class KafkaCallback(BackendQueue):
def __init__(self, key=None, numeric_type=float, none_to=None, **kwargs):
"""
You can pass configuration options to AIOKafkaProducer as keyword arguments.
(either individual kwargs, an unpacked dictionary `**config_dict`, or both)
A full list of configuration parameters can be found at
https://aiokafka.readthedocs.io/en/stable/api.html#aiokafka.AIOKafkaProducer
A 'value_serializer' option allows use of other schemas such as Avro, Protobuf etc.
The default serialization is JSON Bytes
Example:
**{'bootstrap_servers': '127.0.0.1:9092',
'client_id': 'cryptofeed',
'acks': 1,
'value_serializer': your_serialization_function}
(Passing the event loop is already handled)
"""
self.producer_config = kwargs
self.producer = None
self.key: str = key or self.default_key
self.numeric_type = numeric_type
self.none_to = none_to
# Do not allow writer to send messages until connection confirmed
self.running = False
def _default_serializer(self, to_bytes: dict | str) -> ByteString:
if isinstance(to_bytes, dict):
return json.dumpb(to_bytes)
elif isinstance(to_bytes, str):
return to_bytes.encode()
else:
raise TypeError(f'{type(to_bytes)} is not a valid Serialization type')
async def _connect(self):
if not self.producer:
loop = asyncio.get_event_loop()
try:
config_keys = ', '.join([k for k in self.producer_config.keys()])
LOG.info(f'{self.__class__.__name__}: Configuring AIOKafka with the following parameters: {config_keys}')
self.producer = AIOKafkaProducer(**self.producer_config, loop=loop)
# Quit if invalid config option passed to AIOKafka
except (TypeError, ValueError) as e:
LOG.error(f'{self.__class__.__name__}: Invalid AIOKafka configuration: {e.args}{chr(10)}See https://aiokafka.readthedocs.io/en/stable/api.html#aiokafka.AIOKafkaProducer for list of configuration options')
raise SystemExit
else:
while not self.running:
try:
await self.producer.start()
except KafkaConnectionError:
LOG.error(f'{self.__class__.__name__}: Unable to bootstrap from host(s)')
await asyncio.sleep(10)
else:
LOG.info(f'{self.__class__.__name__}: "{self.producer.client._client_id}" connected to cluster containing {len(self.producer.client.cluster.brokers())} broker(s)')
self.running = True
def topic(self, data: dict) -> str:
return f"{self.key}-{data['exchange']}-{data['symbol']}"
def partition_key(self, data: dict) -> Optional[bytes]:
return None
def partition(self, data: dict) -> Optional[int]:
return None
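# Example (field values illustrative): with the default key 'trades', an
# update for BTC-USD on Coinbase is routed to the topic
# 'trades-COINBASE-BTC-USD'; partition_key/partition return None, which
# leaves partition selection to aiokafka's default partitioner.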
async def writer(self):
await self._connect()
while self.running:
async with self.read_queue() as updates:
for update in updates:
topic = self.topic(update)
# Check for user-provided serializers, otherwise use default
value = update if self.producer_config.get('value_serializer') else self._default_serializer(update)
key = self.key if self.producer_config.get('key_serializer') else self._default_serializer(self.key)
partition = self.partition(update)
try:
send_future = await self.producer.send(topic, value, key, partition)
await send_future
except RequestTimedOutError:
LOG.error(f'{self.__class__.__name__}: No response received from server within {self.producer._request_timeout_ms} ms. Messages may not have been delivered')
except NodeNotReadyError:
LOG.error(f'{self.__class__.__name__}: Node not ready')
except Exception as e:
LOG.info(f'{self.__class__.__name__}: Encountered an error:{chr(10)}{e}')
LOG.info(f"{self.__class__.__name__}: sending last messages and closing connection '{self.producer.client._client_id}'")
await self.producer.stop()
class TradeKafka(KafkaCallback, BackendCallback):
default_key = 'trades'
class FundingKafka(KafkaCallback, BackendCallback):
default_key = 'funding'
class BookKafka(KafkaCallback, BackendBookCallback):
default_key = 'book'
def __init__(self, *args, snapshots_only=False, snapshot_interval=1000, **kwargs):
self.snapshots_only = snapshots_only
self.snapshot_interval = snapshot_interval
self.snapshot_count = defaultdict(int)
super().__init__(*args, **kwargs)
class TickerKafka(KafkaCallback, BackendCallback):
default_key = 'ticker'
class OpenInterestKafka(KafkaCallback, BackendCallback):
default_key = 'open_interest'
class LiquidationsKafka(KafkaCallback, BackendCallback):
default_key = 'liquidations'
class CandlesKafka(KafkaCallback, BackendCallback):
default_key = 'candles'
class OrderInfoKafka(KafkaCallback, BackendCallback):
default_key = 'order_info'
class TransactionsKafka(KafkaCallback, BackendCallback):
default_key = 'transactions'
class BalancesKafka(KafkaCallback, BackendCallback):
default_key = 'balances'
class FillsKafka(KafkaCallback, BackendCallback):
default_key = 'fills'
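# --- Hypothetical usage sketch (not part of the original source) ---
# Constructing a callback with the config style shown in KafkaCallback's
# docstring; broker address and options are illustrative.
#
# trade_cb = TradeKafka(bootstrap_servers='127.0.0.1:9092',
#                       client_id='cryptofeed', acks=1)
# The instance is then registered as the 'trades' callback of a feed; its
# writer() task connects lazily and publishes one message per update.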
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/backend/kafka.py
|
kafka.py
|
from collections import defaultdict
import asyncio
import logging
from textwrap import wrap
from yapic import json
# NOTE: import path assumed from this wheel's layout (backend/backend.py)
from backend.backend import BackendQueue, BackendBookCallback, BackendCallback
LOG = logging.getLogger('feedhandler')
class UDPProtocol:
def __init__(self, loop):
self.loop = loop
self.transport = None
def connection_made(self, transport):
self.transport = transport
def datagram_received(self, data, addr):
pass
def error_received(self, exc):
LOG.error('UDP backend received exception: %s', exc)
self.transport.close()
self.transport = None
def connection_lost(self, exc):
LOG.error('UDP backend connection lost: %s', exc)
self.transport.close()
self.transport = None
class SocketCallback(BackendQueue):
def __init__(self, addr: str, port=None, none_to=None, numeric_type=float, key=None, mtu=1400, **kwargs):
"""
Common parent class for all socket callbacks
Parameters
----------
addr: str
Address for connection. Should be in the format:
<protocol>://<address>
Example:
tcp://127.0.0.1
uds:///tmp/crypto.uds
udp://127.0.0.1
port: int
port for connection. Should not be specified for UDS connections
mtu: int
MTU for UDP message size. Should be slightly less than actual MTU for overhead
"""
self.conn_type = addr[:6]
if self.conn_type not in {'tcp://', 'uds://', 'udp://'}:
raise ValueError("Invalid protocol specified for SocketCallback")
self.conn = None
self.protocol = None
self.addr = addr[6:]
self.port = port
self.mtu = mtu
self.numeric_type = numeric_type
self.none_to = none_to
self.key = key if key else self.default_key
self.running = True
async def writer(self):
while self.running:
await self.connect()
async with self.read_queue() as updates:
for update in updates:
data = {'type': self.key, 'data': update}
data = json.dumps(data)
if self.conn_type == 'udp://':
if len(data) > self.mtu:
chunks = wrap(data, self.mtu)
for chunk in chunks:
msg = json.dumps({'type': 'chunked', 'chunks': len(chunks), 'data': chunk}).encode()
self.conn.sendto(msg)
else:
self.conn.sendto(data.encode())
else:
self.conn.write(data.encode())
async def connect(self):
if not self.conn:
if self.conn_type == 'udp://':
loop = asyncio.get_event_loop()
self.conn, self.protocol = await loop.create_datagram_endpoint(
lambda: UDPProtocol(loop), remote_addr=(self.addr, self.port))
elif self.conn_type == 'tcp://':
_, self.conn = await asyncio.open_connection(host=self.addr, port=self.port)
elif self.conn_type == 'uds://':
_, self.conn = await asyncio.open_unix_connection(path=self.addr)
class TradeSocket(SocketCallback, BackendCallback):
default_key = 'trades'
class FundingSocket(SocketCallback, BackendCallback):
default_key = 'funding'
class BookSocket(SocketCallback, BackendBookCallback):
default_key = 'book'
def __init__(self, *args, snapshots_only=False, snapshot_interval=1000, **kwargs):
self.snapshots_only = snapshots_only
self.snapshot_interval = snapshot_interval
self.snapshot_count = defaultdict(int)
super().__init__(*args, **kwargs)
class TickerSocket(SocketCallback, BackendCallback):
default_key = 'ticker'
class OpenInterestSocket(SocketCallback, BackendCallback):
default_key = 'open_interest'
class LiquidationsSocket(SocketCallback, BackendCallback):
default_key = 'liquidations'
class CandlesSocket(SocketCallback, BackendCallback):
default_key = 'candles'
class OrderInfoSocket(SocketCallback, BackendCallback):
default_key = 'order_info'
class TransactionsSocket(SocketCallback, BackendCallback):
default_key = 'transactions'
class BalancesSocket(SocketCallback, BackendCallback):
default_key = 'balances'
class FillsSocket(SocketCallback, BackendCallback):
default_key = 'fills'
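# --- Hypothetical usage sketch (not part of the original source) ---
# Addresses and ports are illustrative; see SocketCallback's docstring for
# the accepted <protocol>://<address> forms.
#
# trades_cb = TradeSocket('tcp://127.0.0.1', port=5556)
# book_cb = BookSocket('udp://127.0.0.1', port=5555, snapshot_interval=500)
# Each instance is registered with the feed handler, which starts writer()
# on its event loop; UDP payloads larger than ``mtu`` are chunked.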
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/backend/socket.py
|
socket.py
|
import asyncio
from asyncio.queues import Queue
from multiprocessing import Pipe, Process
from contextlib import asynccontextmanager
SHUTDOWN_SENTINEL = 'STOP'
class BackendQueue:
def start(self, loop: asyncio.AbstractEventLoop, multiprocess=False):
if hasattr(self, 'started') and self.started:
# prevent a backend callback from starting more than 1 writer and creating more than 1 queue
return
self.multiprocess = multiprocess
if self.multiprocess:
self.queue = Pipe(duplex=False)
self.worker = Process(target=BackendQueue.worker, args=(self.writer,), daemon=True)
self.worker.start()
else:
self.queue = Queue()
self.worker = loop.create_task(self.writer())
self.started = True
async def stop(self):
if self.multiprocess:
self.queue[1].send(SHUTDOWN_SENTINEL)
self.worker.join()
else:
await self.queue.put(SHUTDOWN_SENTINEL)
self.running = False
@staticmethod
def worker(writer):
try:
loop = asyncio.new_event_loop()
loop.run_until_complete(writer())
except KeyboardInterrupt:
pass
async def writer(self):
raise NotImplementedError
async def write(self, data):
if self.multiprocess:
self.queue[1].send(data)
else:
await self.queue.put(data)
@asynccontextmanager
async def read_queue(self) -> list:
if self.multiprocess:
msg = self.queue[0].recv()
if msg == SHUTDOWN_SENTINEL:
self.running = False
yield []
else:
yield [msg]
else:
current_depth = self.queue.qsize()
if current_depth == 0:
update = await self.queue.get()
if update == SHUTDOWN_SENTINEL:
yield []
else:
yield [update]
self.queue.task_done()
else:
ret = []
count = 0
while current_depth > count:
update = await self.queue.get()
count += 1
if update == SHUTDOWN_SENTINEL:
self.running = False
break
ret.append(update)
yield ret
for _ in range(count):
self.queue.task_done()
class BackendCallback:
async def __call__(self, dtype, receipt_timestamp: float):
data = dtype.to_dict(numeric_type=self.numeric_type, none_to=self.none_to)
if not dtype.timestamp:
data['timestamp'] = receipt_timestamp
data['receipt_timestamp'] = receipt_timestamp
await self.write(data)
class BackendBookCallback:
async def _write_snapshot(self, book, receipt_timestamp: float):
data = book.to_dict(numeric_type=self.numeric_type, none_to=self.none_to)
del data['delta']
if not book.timestamp:
data['timestamp'] = receipt_timestamp
data['receipt_timestamp'] = receipt_timestamp
await self.write(data)
async def __call__(self, book, receipt_timestamp: float):
if self.snapshots_only:
await self._write_snapshot(book, receipt_timestamp)
else:
data = book.to_dict(delta=book.delta is not None, numeric_type=self.numeric_type, none_to=self.none_to)
if not book.timestamp:
data['timestamp'] = receipt_timestamp
data['receipt_timestamp'] = receipt_timestamp
if book.delta is None:
del data['delta']
else:
self.snapshot_count[book.symbol] += 1
await self.write(data)
if self.snapshot_interval <= self.snapshot_count[book.symbol] and book.delta:
await self._write_snapshot(book, receipt_timestamp)
self.snapshot_count[book.symbol] = 0
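# --- Hypothetical self-contained demo (not part of the original source) ---
# A minimal BackendQueue subclass whose writer() prints batches; it
# exercises start()/write()/stop() on the single-process code path.
#
# class PrintQueue(BackendQueue):
#     async def writer(self):
#         self.running = True
#         while self.running:
#             async with self.read_queue() as updates:
#                 for update in updates:
#                     print('got', update)
#
# async def _demo():
#     q = PrintQueue()
#     q.start(asyncio.get_running_loop())
#     await q.write({'price': 1.0})
#     await q.stop()
#     await asyncio.sleep(0)  # let the writer drain before exit
#
# asyncio.run(_demo())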
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/backend/backend.py
|
backend.py
|
import asyncio
import logging
import os
from collections import defaultdict
from asyncio import Queue, CancelledError
from contextlib import asynccontextmanager, suppress
from typing import AsyncIterable, Callable, List, Tuple, Union
from decimal import Decimal
import atexit
from dataclasses import dataclass
import requests
import websockets
import aiohttp
from aiohttp.typedefs import StrOrURL
# from yapic import json as json_parser
class Feed:
"""
"""
def __init__(self, candle_interval='1m', candle_closed_only=True, timeout=120, timeout_interval=30, retries=10, symbols=None, channels=None, subscription=None, callbacks=None, max_depth=0, checksum_validation=False, cross_check=False, exceptions=None, log_message_on_error=False, delay_start=0, http_proxy: StrOrURL = None, **kwargs):
"""
candle_interval: str
the candle interval, i.e. the length of time between a candle's open and
close. See the specific exchange to see what intervals it supports
candle_closed_only: bool
returns only closed/completed candles (if supported by exchange).
timeout: int
Time, in seconds, between message to wait before a feed is considered dead and will be restarted.
Set to -1 for infinite.
timeout_interval: int
Time, in seconds, between timeout checks.
retries: int
Number of times to retry a failed connection. Set to -1 for infinite
symbols: list of str, Symbol
A list of instrument symbols. Symbols must be of type str or Symbol
max_depth: int
Maximum number of levels per side to return in book updates. 0 is the default, and indicates no trimming of levels should be performed.
checksum_validation: bool
Toggle checksum validation, when supported by an exchange.
cross_check: bool
Toggle a check for a crossed book. Should not be needed on exchanges that support
checksums or provide message sequence numbers.
exceptions: list of exceptions
These exceptions will not be handled internally and will be passed to the asyncio exception handler. To
handle them feedhandler will need to be supplied with a custom exception handler. See the `run` method
on FeedHandler, specifically the `exception_handler` keyword argument.
log_message_on_error: bool
If an exception is encountered in the connection handler, log the raw message
delay_start: int, float
a delay before starting the feed/connection to the exchange. If you are subscribing to a large number of feeds
on a single exchange, you may encounter 429s. You can use this to stagger the starts.
http_proxy: str
URL of proxy server. Passed to HTTPPoll and HTTPAsyncConn. Only used for HTTP GET requests.
"""
super().__init__(**kwargs)
self.log_on_error = log_message_on_error
self.retries = retries
self.exceptions = exceptions
self.connection_handlers = []
self.timeout = timeout
self.timeout_interval = timeout_interval
self.subscription = defaultdict(set)
self.cross_check = cross_check
self.normalized_symbols = []
self.max_depth = max_depth
self.previous_book = defaultdict(dict)
self.checksum_validation = checksum_validation
self.requires_authentication = False
self._feed_config = defaultdict(list)
self.http_conn = HTTPAsyncConn(self.id, http_proxy)
self.http_proxy = http_proxy
self.start_delay = delay_start
self.candle_interval = candle_interval
self.candle_closed_only = candle_closed_only
self._sequence_no = {}
if self.valid_candle_intervals != NotImplemented:
if candle_interval not in self.valid_candle_intervals:
raise ValueError(f"Candle interval must be one of {self.valid_candle_intervals}")
if self.candle_interval_map != NotImplemented:
self.normalize_candle_interval = {value: key for key, value in self.candle_interval_map.items()}
if subscription is not None and (symbols is not None or channels is not None):
raise ValueError("Use subscription, or channels and symbols, not both")
if subscription is not None:
for channel in subscription:
chan = self.std_channel_to_exchange(channel)
if self.is_authenticated_channel(channel):
if not self.key_id or not self.key_secret:
raise ValueError("Authenticated channel subscribed to, but no auth keys provided")
self.requires_authentication = True
self.normalized_symbols.extend(subscription[channel])
self.subscription[chan].update([self.std_symbol_to_exchange_symbol(symbol) for symbol in subscription[channel]])
self._feed_config[channel].extend(self.normalized_symbols)
if symbols and channels:
if any(self.is_authenticated_channel(chan) for chan in channels):
if not self.key_id or not self.key_secret:
raise ValueError("Authenticated channel subscribed to, but no auth keys provided")
self.requires_authentication = True
# if we don't have a subscription dict, we'll use symbols+channels and build one
for channel in channels:
self._feed_config[channel].extend(symbols)
self.normalized_symbols = symbols
self.normalized_channels = channels
symbols = [self.std_symbol_to_exchange_symbol(symbol) for symbol in symbols]
channels = list(set([self.std_channel_to_exchange(chan) for chan in channels]))
self.subscription = {chan: symbols for chan in channels}
self._feed_config = dict(self._feed_config)
self._auth_token = None
self._l3_book = {}
self._l2_book = {}
self.callbacks = {FUNDING: Callback(None),
INDEX: Callback(None),
L2_BOOK: Callback(None),
L3_BOOK: Callback(None),
LIQUIDATIONS: Callback(None),
OPEN_INTEREST: Callback(None),
TICKER: Callback(None),
TRADES: Callback(None),
CANDLES: Callback(None),
ORDER_INFO: Callback(None),
FILLS: Callback(None),
BALANCES: Callback(None),
POSITIONS: Callback(None)
}
if callbacks:
for cb_type, cb_func in callbacks.items():
self.callbacks[cb_type] = cb_func
for key, callback in self.callbacks.items():
if not isinstance(callback, list):
self.callbacks[key] = [callback]
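# Example (subclass and symbols hypothetical): a concrete exchange feed is
# typically constructed either with a subscription dict,
#   SomeExchange(subscription={'trades': ['BTC-USD'], 'l2_book': ['ETH-USD']})
# or with parallel channels/symbols lists,
#   SomeExchange(channels=['trades'], symbols=['BTC-USD'])
# but not with both (see the ValueError above).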
def _connect_rest(self):
"""
Child classes should override this method to generate connection objects that
support their polled REST endpoints.
"""
return []
def connect(self) -> List[Tuple[AsyncConnection, Callable[[None], None], Callable[[str, float], None]]]:
"""
Generic websocket connection method for exchanges. Uses the websocket endpoints defined in the
exchange to determine, based on the subscription information, which endpoints should be used,
and what instruments/channels should be enabled on each connection.
Connect returns a list of tuples. Each tuple contains
1. an AsyncConnection object
2. the subscribe function pointer associated with this connection
3. the message handler for this connection
4. The authentication method for this connection
"""
def limit_sub(subscription: dict, limit: int, auth, options: dict):
ret = []
sub = {}
for channel in subscription:
for pair in subscription[channel]:
if channel not in sub:
sub[channel] = []
sub[channel].append(pair)
if sum(map(len, sub.values())) == limit:
ret.append((WSAsyncConn(addr, self.id, authentication=auth, subscription=sub, **options), self.subscribe, self.message_handler, self.authenticate))
sub = {}
if sum(map(len, sub.values())) > 0:
ret.append((WSAsyncConn(addr, self.id, authentication=auth, subscription=sub, **options), self.subscribe, self.message_handler, self.authenticate))
return ret
ret = self._connect_rest()
for endpoint in self.websocket_endpoints:
auth = None
if endpoint.authentication:
# if a class has an endpoint with the authentication flag set to true, this
# method must be defined. The method will be called immediately before connecting
# to authenticate the connection. _ws_authentication returns a tuple of address and ws options
auth = self._ws_authentication
limit = endpoint.limit
addr = self._address()
addr = endpoint.get_address(self.sandbox) if addr is None else addr
if not addr:
continue
# filtering can only be done on normalized symbols, but this subscription needs to have the raw/exchange specific
# subscription, so we need to temporarily convert the symbols back and forth. It has to be done here
# while in the context of the class
temp_sub = {chan: [self.exchange_symbol_to_std_symbol(s) for s in symbols] for chan, symbols in self.subscription.items()}
filtered_sub = {chan: [self.std_symbol_to_exchange_symbol(s) for s in symbols] for chan, symbols in endpoint.subscription_filter(temp_sub).items()}
count = sum(map(len, filtered_sub.values()))
if not self.allow_empty_subscriptions and (not filtered_sub or count == 0):
continue
if limit and count > limit:
ret.extend(limit_sub(filtered_sub, limit, auth, endpoint.options))
else:
if isinstance(addr, list):
for add in addr:
ret.append((WSAsyncConn(add, self.id, authentication=auth, subscription=filtered_sub, **endpoint.options), self.subscribe, self.message_handler, self.authenticate))
else:
ret.append((WSAsyncConn(addr, self.id, authentication=auth, subscription=filtered_sub, **endpoint.options), self.subscribe, self.message_handler, self.authenticate))
return ret
def _ws_authentication(self, address: str, ws_options: dict) -> Tuple[str, dict]:
'''
Used to do authentication immediately before connecting. Takes the address and the websocket options as
arguments and returns a new address and new websocket options that will be used to connect.
'''
raise NotImplementedError
def _address(self):
'''
If you need to dynamically calculate the address before connecting, overload this method in the exchange object.
'''
return None
@property
def address(self) -> Union[List, str]:
if len(self.websocket_endpoints) == 0:
return
addrs = [ep.get_address(sandbox=self.sandbox) for ep in self.websocket_endpoints]
return addrs[0] if len(addrs) == 1 else addrs
async def book_callback(self, book_type: str, book: OrderBook, receipt_timestamp: float, timestamp=None, raw=None, sequence_number=None, checksum=None, delta=None):
if self.cross_check:
self.check_bid_ask_overlapping(book)
book.timestamp = timestamp
book.raw = raw
book.sequence_number = sequence_number
book.delta = delta
book.checksum = checksum
await self.callback(book_type, book, receipt_timestamp)
def check_bid_ask_overlapping(self, data):
bid, ask = data.book.bids, data.book.asks
if len(bid) > 0 and len(ask) > 0:
best_bid, best_ask = bid.index(0)[0], ask.index(0)[0]
if best_bid >= best_ask:
raise BidAskOverlapping(f"{self.id} - {data.symbol}: best bid {best_bid} >= best ask {best_ask}")
async def callback(self, data_type, obj, receipt_timestamp):
for cb in self.callbacks[data_type]:
await cb(obj, receipt_timestamp)
async def message_handler(self, msg: str, conn: AsyncConnection, timestamp: float):
raise NotImplementedError
async def subscribe(self, connection: AsyncConnection):
raise NotImplementedError
async def authenticate(self, connection: AsyncConnection):
pass
async def shutdown(self):
LOG.info('%s: feed shutdown starting...', self.id)
await self.http_conn.close()
for callbacks in self.callbacks.values():
for callback in callbacks:
if hasattr(callback, 'stop'):
LOG.info('%s: stopping backend %s', self.id, self.backend_name(callback))
await callback.stop()
for c in self.connection_handlers:
await c.conn.close()
LOG.info('%s: feed shutdown completed', self.id)
def stop(self):
for c in self.connection_handlers:
c.running = False
def start(self, loop: asyncio.AbstractEventLoop):
"""
Create tasks for exchange interfaces and backends
"""
for conn, sub, handler, auth in self.connect():
self.connection_handlers.append(ConnectionHandler(conn, sub, handler, auth, self.retries, timeout=self.timeout, timeout_interval=self.timeout_interval, exceptions=self.exceptions, log_on_error=self.log_on_error, start_delay=self.start_delay))
self.connection_handlers[-1].start(loop)
for callbacks in self.callbacks.values():
for callback in callbacks:
if hasattr(callback, 'start'):
LOG.info('%s: starting backend task %s with multiprocessing=%s', self.id, self.backend_name(callback), 'True' if self.config.backend_multiprocessing else 'False')
# Backends start tasks to write messages
callback.start(loop, multiprocess=self.config.backend_multiprocessing)
def backend_name(self, callback):
if hasattr(callback, '__class__'):
if hasattr(callback, 'handler'):
return callback.handler.__class__.__name__ + "+" + callback.__class__.__name__
return callback.__class__.__name__
return callback.__name__
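# --- Editor's illustrative sketch (not part of the original file) ---
# A minimal child class showing how the (connection, subscribe, message_handler,
# authenticate) tuples returned by connect() are consumed. The endpoint URL,
# channel names, and message format below are assumptions for illustration only.
#
# class ToyExchange(Feed):
#     id = 'TOY'
#     websocket_endpoints = [WebsocketEndpoint('wss://example.invalid/ws')]
#
#     async def subscribe(self, connection: AsyncConnection):
#         for chan, symbols in connection.subscription.items():
#             await connection.write(json.dumps({'op': 'subscribe', 'channel': chan, 'symbols': symbols}))
#
#     async def message_handler(self, msg: str, conn: AsyncConnection, timestamp: float):
#         data = json.loads(msg)
#         if data.get('channel') == 'trades':
#             await self.callback(TRADES, data, timestamp)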
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/data/feed.py
|
feed.py
|
import copy
import json
import re
import sys
from collections.abc import Iterable, Mapping
from collections.abc import Sequence as SequenceABC
from dataclasses import InitVar, dataclass, field, fields
from functools import reduce, wraps
from operator import mul
from typing import Any, Callable, ClassVar, Dict, List, Optional, Tuple, Union
from typing import Sequence as Sequence_
import logging
import numpy as np
import pandas as pd
import pyarrow as pa
import pyarrow.compute as pc
import pyarrow.types
from pandas.api.extensions import ExtensionArray as PandasExtensionArray
from pandas.api.extensions import ExtensionDtype as PandasExtensionDtype
logger = logging.getLogger(__name__)
def get_nested_type(schema: FeatureType) -> pa.DataType:
"""
get_nested_type() converts a datasets.FeatureType into a pyarrow.DataType, and acts as the inverse of
generate_from_arrow_type().
It performs double-duty as the implementation of Features.type and handles the conversion of
datasets.Feature->pa.struct
"""
# Nested structures: we allow dict, list/tuples, sequences
if isinstance(schema, Features):
return pa.struct(
{key: get_nested_type(schema[key]) for key in schema}
) # Features is subclass of dict, and dict order is deterministic since Python 3.6
elif isinstance(schema, dict):
return pa.struct(
{key: get_nested_type(schema[key]) for key in schema}
) # however don't sort on struct types since the order matters
elif isinstance(schema, (list, tuple)):
if len(schema) != 1:
raise ValueError("When defining list feature, you should just provide one example of the inner type")
value_type = get_nested_type(schema[0])
return pa.list_(value_type)
# Other objects are callable which returns their data type (ClassLabel, Array2D, Translation, Arrow datatype creation methods)
return schema()
class Features(dict):
"""
"""
def __init__(*args, **kwargs):
if not args:
raise TypeError("descriptor '__init__' of 'Features' object needs an argument")
self, *args = args
super(Features, self).__init__(*args, **kwargs)
def __reduce__(self):
return Features, (dict(self),)
@property
def type(self):
"""
Features field types.
Returns:
:obj:`pyarrow.DataType`
"""
return get_nested_type(self)
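# Editor's illustrative sketch: leaf feature types here are any zero-argument
# callables returning a pyarrow DataType, so plain pyarrow factories work:
#
#     feats = Features({'text': pa.string, 'scores': [pa.float64]})
#     feats.type  # -> struct<text: string, scores: list<item: double>>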
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/data/dataset/features.py
|
features.py
|
import contextlib
import copy
import itertools
import json
import os
import posixpath
import re
import shutil
import sys
import tempfile
import time
import warnings
import weakref
from collections import Counter
from collections.abc import Mapping
from copy import deepcopy
from fnmatch import fnmatch
from functools import partial, wraps
from io import BytesIO
from math import ceil, floor
from pathlib import Path
from random import sample
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Callable,
Dict,
Iterable,
Iterator,
List,
Optional,
Tuple,
Union,
overload,
)
from typing import Sequence as Sequence_
import fsspec
import numpy as np
import pandas as pd
import pyarrow as pa
import pyarrow.compute as pc
# from huggingface_hub import DatasetCard, DatasetCardData, HfApi, HfFolder
from multiprocessing import Pool
from requests import HTTPError
import logging
logger = logging.getLogger(__name__)
class DatasetInfoMixin:
"""This base class exposes some attributes of DatasetInfo
at the base level of the Dataset for easy access.
"""
def __init__(self, info: DatasetInfo, split: Optional[NamedSplit]):
self._info = info
self._split = split
@property
def info(self):
"""[`~datasets.DatasetInfo`] object containing all the metadata in the dataset."""
return self._info
@property
def split(self):
"""[`~datasets.NamedSplit`] object corresponding to a named dataset split."""
return self._split
@property
def builder_name(self) -> str:
return self._info.builder_name
@property
def citation(self) -> str:
return self._info.citation
@property
def config_name(self) -> str:
return self._info.config_name
@property
def dataset_size(self) -> Optional[int]:
return self._info.dataset_size
@property
def description(self) -> str:
return self._info.description
@property
def download_checksums(self) -> Optional[dict]:
return self._info.download_checksums
@property
def download_size(self) -> Optional[int]:
return self._info.download_size
@property
def features(self) -> Optional[Features]:
return self._info.features.copy() if self._info.features is not None else None
@property
def homepage(self) -> Optional[str]:
return self._info.homepage
@property
def license(self) -> Optional[str]:
return self._info.license
@property
def size_in_bytes(self) -> Optional[int]:
return self._info.size_in_bytes
@property
def supervised_keys(self):
return self._info.supervised_keys
@property
def task_templates(self):
return self._info.task_templates
@property
def version(self):
return self._info.version
def update_metadata_with_features(table: Table, features: Features):
"""To be used in dataset transforms that modify the features of the dataset, in order to update the features stored in the metadata of its schema."""
features = Features({col_name: features[col_name] for col_name in table.column_names})
if table.schema.metadata is None or b"huggingface" not in table.schema.metadata:
pa_metadata = ArrowWriter._build_metadata(DatasetInfo(features=features))
else:
metadata = json.loads(table.schema.metadata[b"huggingface"].decode())
if "info" not in metadata:
metadata["info"] = asdict(DatasetInfo(features=features))
else:
metadata["info"]["features"] = asdict(DatasetInfo(features=features))["features"]
pa_metadata = {"huggingface": json.dumps(metadata)}
table = table.replace_schema_metadata(pa_metadata)
return table
def _check_table(table) -> Table:
"""We check the table type to make sure it's an instance of :class:`datasets.table.Table`"""
if isinstance(table, pa.Table):
# for a pyarrow table, we can just consider it as a in-memory table
# this is here for backward compatibility
return InMemoryTable(table)
elif isinstance(table, Table):
return table
else:
raise TypeError(f"Expected a pyarrow.Table or a datasets.table.Table object, but got {table}.")
def _check_column_names(column_names: List[str]):
"""Check the column names to make sure they don't contain duplicates."""
counter = Counter(column_names)
if not all(count == 1 for count in counter.values()):
duplicated_columns = [col for col in counter if counter[col] > 1]
raise ValueError(f"The table can't have duplicated columns but columns {duplicated_columns} are duplicated.")
def _check_valid_indices_value(index, size):
if (index < 0 and index + size < 0) or (index >= size):
raise IndexError(f"Index {index} out of range for dataset of size {size}.")
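# Editor's note, worked examples: for size=5 the valid indices are -5..4, so
# _check_valid_indices_value(4, 5) passes while _check_valid_indices_value(5, 5)
# and _check_valid_indices_value(-6, 5) both raise IndexError.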
class NonExistentDatasetError(Exception):
"""Used when we expect the existence of a dataset"""
pass
class Dataset(DatasetInfoMixin):
"""
"""
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/data/dataset/base.py
|
base.py
|
import json
import datetime
import uuid
import asyncio
from aiokafka import AIOKafkaProducer, AIOKafkaConsumer
import ib_insync as ibi
import logging
from .producer import Producer
config = {
'ib_client':'127.0.0.1',
'ib_port': 7497,
'ib_clientId': 11,
'kafka_bootstrap_servers': 'localhost:9092'
}
class IBMarketProducer(Producer):
"""
Market data stream handler
"""
def __init__(self, config, tickers, logger=None):
    # note: super().__init__ already receives self implicitly
    super().__init__(config['kafka_bootstrap_servers'])
    self.client = config['ib_client']
    self.port = config['ib_port']
    self.clientId = config['ib_clientId']
    self.logger = logger or logging.getLogger(__name__)
    self.tickers = tickers
    self.subscriptions = []
    self.ib = ibi.IB()  # connected lazily in streamMarketData via connectAsync()
    self.producer = AIOKafkaProducer(
        bootstrap_servers=config['kafka_bootstrap_servers'])
async def startProducer(self):
    # AIOKafkaProducer.start() returns None, so don't rebind self.producer
    await self.producer.start()
    self.logger.info('Kafka producer started')
async def streamMarketData(self, marketDataType):
await self.producer.start()
with await self.ib.connectAsync(self.client, self.port, clientId=self.clientId):
contracts = [
ibi.Stock(symbol, 'SMART', 'USD')
for symbol in self.tickers]
self.ib.reqMarketDataType(marketDataType)  # use the caller-supplied type instead of a hardcoded 3
for contract in contracts:
    self.ib.reqMktData(contract)
async for tickers in self.ib.pendingTickersEvent:
for ticker in tickers:
# data = json.dumps(ticker.dict())
stock_dict = ticker.contract.dict()
data = {'name': ticker.contract.symbol,
'message_id': str(uuid.uuid4()),
'timestamp': str(datetime.datetime.utcnow()),
'symbol': ticker.contract.symbol,
'exchange': ticker.contract.exchange,
'currency': ticker.contract.currency,
# 'time': datetime.datetime(2023, 4, 24, 23, 0, 32, 864429, tzinfo=datetime.timezone.utc),
'time': str(ticker.time),
'bid': ticker.bid,
'bidSize': ticker.bidSize,
'ask': ticker.ask,
'askSize': ticker.askSize,
'last': ticker.last,
'lastSize': ticker.lastSize,
'prevBid': ticker.prevBid,
'prevBidSize': ticker.prevBidSize,
'prevAsk': ticker.prevAsk,
'prevAskSize': ticker.prevAskSize,
'prevLast': ticker.prevLast,
'prevLastSize': ticker.prevLastSize,
'volume': ticker.volume,
'open': ticker.open,
'high': ticker.high,
'low': ticker.low,
'close': ticker.close,
'vwap': ticker.vwap,
}
# print(ticker.dict())
print(json.dumps(data))
print(stock_dict)
print(datetime.datetime.now(), ticker.close)
msg_data = json.dumps(data).encode("ascii")
await self.producer.send('mktDataStream', msg_data)
# response = ProducerResponse(
# name=msg.name, message_id=msg.message_id, topic=topicname
# )
# logger.info(response)
def disconnectIb(self):
    self.ib.disconnect()
    self.logger.info('IB disconnected')
async def stop_producer(self):
    # AIOKafkaProducer.stop() is a coroutine and must be awaited
    await self.producer.stop()
    self.logger.info('Kafka producer stopped')
async def stop(self):
    self.disconnectIb()
    await self.stop_producer()
def deserializer(serialized):
return json.loads(serialized)
async def consume():
# consumer will decompress messages automatically
# in accordance to compression type specified in producer
consumer = AIOKafkaConsumer(
'mktDataStream',
bootstrap_servers='localhost:9092',
value_deserializer=deserializer)
await consumer.start()
data = await consumer.getmany(timeout_ms=10000)
for tp, messages in data.items():
for message in messages:
print(type(message.value), message.value)
await consumer.stop()
if __name__ == '__main__':
    test_stream = IBMarketProducer(config=config, tickers=['GOOG', 'AAPL'])
    async def main():
        # run the producer stream and the demo consumer concurrently;
        # the bare coroutine call in the original was never awaited
        await asyncio.gather(
            test_stream.streamMarketData(marketDataType=3),
            consume())
    asyncio.run(main())
|
AlgoVision-Quant-Research
|
/AlgoVision_Quant_Research-0.0.2-py3-none-any.whl/data/live/producers/ibkr_producer.py
|
ibkr_producer.py
|
__all__ = [
'NormalDist',
'StatisticsError',
'correlation',
'covariance',
'fmean',
'geometric_mean',
'harmonic_mean',
'linear_regression',
'mean',
'median',
'median_grouped',
'median_high',
'median_low',
'mode',
'multimode',
'pstdev',
'pvariance',
'quantiles',
'stdev',
'variance',
]
import math
import numbers
import random
from fractions import Fraction
from decimal import Decimal
from itertools import groupby, repeat
from bisect import bisect_left, bisect_right
from math import hypot, sqrt, fabs, exp, erf, tau, log, fsum
from operator import itemgetter, mul
from collections import Counter, namedtuple
# === Exceptions ===
class StatisticsError(ValueError):
pass
# === Private utilities ===
def _sum(data, start=0):
"""_sum(data [, start]) -> (type, sum, count)
Return a high-precision sum of the given numeric data as a fraction,
together with the type to be converted to and the count of items.
"""
count = 0
n, d = _exact_ratio(start)
partials = {d: n}
partials_get = partials.get
T = _coerce(int, type(start))
for typ, values in groupby(data, type):
T = _coerce(T, typ) # or raise TypeError
for n, d in map(_exact_ratio, values):
count += 1
partials[d] = partials_get(d, 0) + n
if None in partials:
# The sum will be a NAN or INF. We can ignore all the finite
# partials, and just look at this special one.
total = partials[None]
assert not _isfinite(total)
else:
# Sum all the partial sums using builtin sum.
# FIXME is this faster if we sum them in order of the denominator?
total = sum(Fraction(n, d) for d, n in sorted(partials.items()))
return (T, total, count)
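# Editor's note, worked example: _sum([0.5, 0.25]) accumulates the exact
# ratios {2: 1, 4: 1} and returns (<class 'float'>, Fraction(3, 4), 2).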
def _isfinite(x):
try:
return x.is_finite() # Likely a Decimal.
except AttributeError:
return math.isfinite(x) # Coerces to float first.
def _coerce(T, S):
"""Coerce types T and S to a common type, or raise TypeError.
Coercion rules are currently an implementation detail. See the CoerceTest
test class in test_statistics for details.
"""
# See http://bugs.python.org/issue24068.
assert T is not bool, "initial type T is bool"
# If the types are the same, no need to coerce anything. Put this
# first, so that the usual case (no coercion needed) happens as soon
# as possible.
if T is S: return T
# Mixed int & other coerce to the other type.
if S is int or S is bool: return T
if T is int: return S
# If one is a (strict) subclass of the other, coerce to the subclass.
if issubclass(S, T): return S
if issubclass(T, S): return T
# Ints coerce to the other type.
if issubclass(T, int): return S
if issubclass(S, int): return T
# Mixed fraction & float coerces to float (or float subclass).
if issubclass(T, Fraction) and issubclass(S, float):
return S
if issubclass(T, float) and issubclass(S, Fraction):
return T
# Any other combination is disallowed.
msg = "don't know how to coerce %s and %s"
raise TypeError(msg % (T.__name__, S.__name__))
def _exact_ratio(x):
"""Return Real number x to exact (numerator, denominator) pair.
"""
try:
# Optimise the common case of floats. We expect that the most often
# used numeric type will be builtin floats, so try to make this as
# fast as possible.
if type(x) is float or type(x) is Decimal:
return x.as_integer_ratio()
try:
# x may be an int, Fraction, or Integral ABC.
return (x.numerator, x.denominator)
except AttributeError:
try:
# x may be a float or Decimal subclass.
return x.as_integer_ratio()
except AttributeError:
# Just give up?
pass
except (OverflowError, ValueError):
# float NAN or INF.
assert not _isfinite(x)
return (x, None)
msg = "can't convert type '{}' to numerator/denominator"
raise TypeError(msg.format(type(x).__name__))
def _convert(value, T):
"""Convert value to given numeric type T."""
if type(value) is T:
# This covers the cases where T is Fraction, or where value is
# a NAN or INF (Decimal or float).
return value
if issubclass(T, int) and value.denominator != 1:
T = float
try:
# FIXME: what do we do if this overflows?
return T(value)
except TypeError:
if issubclass(T, Decimal):
return T(value.numerator) / T(value.denominator)
else:
raise
def _find_lteq(a, x):
'Locate the leftmost value exactly equal to x'
i = bisect_left(a, x)
if i != len(a) and a[i] == x:
return i
raise ValueError
def _find_rteq(a, l, x):
'Locate the rightmost value exactly equal to x'
i = bisect_right(a, x, lo=l)
if i != (len(a) + 1) and a[i - 1] == x:
return i - 1
raise ValueError
def _fail_neg(values, errmsg='negative value'):
"""Iterate over values, failing if any are less than zero."""
for x in values:
if x < 0:
raise StatisticsError(errmsg)
yield x
# === Measures of central tendency (averages) ===
def mean(data):
"""Return the sample arithmetic mean of data.
"""
if iter(data) is data:
data = list(data)
n = len(data)
if n < 1:
raise StatisticsError('mean requires at least one data point')
T, total, count = _sum(data)
assert count == n
return _convert(total / n, T)
def fmean(data, weights=None):
"""Convert data to floats and compute the arithmetic mean.
This runs faster than the mean() function and it always returns a float.
If the input dataset is empty, it raises a StatisticsError.
"""
try:
n = len(data)
except TypeError:
# Handle iterators that do not define __len__().
n = 0
def count(iterable):
nonlocal n
for n, x in enumerate(iterable, start=1):
yield x
data = count(data)
if weights is None:
total = fsum(data)
if not n:
raise StatisticsError('fmean requires at least one data point')
return total / n
try:
num_weights = len(weights)
except TypeError:
weights = list(weights)
num_weights = len(weights)
num = fsum(map(mul, data, weights))
if n != num_weights:
raise StatisticsError('data and weights must be the same length')
den = fsum(weights)
if not den:
raise StatisticsError('sum of weights must be non-zero')
return num / den
def geometric_mean(data):
"""Convert data to floats and compute the geometric mean.
Raises a StatisticsError if the input dataset is empty.
"""
try:
return exp(fmean(map(log, data)))
except ValueError:
raise StatisticsError('geometric mean requires a non-empty dataset '
                      'containing positive numbers') from None
def harmonic_mean(data, weights=None):
"""Return the harmonic mean of data.
The harmonic mean is the reciprocal of the arithmetic mean of the
reciprocals of the data. It can be used for averaging ratios or
rates, for example speeds.
If ``data`` is empty, or any element is less than zero,
``harmonic_mean`` will raise ``StatisticsError``.
"""
if iter(data) is data:
data = list(data)
errmsg = 'harmonic mean does not support negative values'
n = len(data)
if n < 1:
raise StatisticsError('harmonic_mean requires at least one data point')
elif n == 1 and weights is None:
x = data[0]
if isinstance(x, (numbers.Real, Decimal)):
if x < 0:
raise StatisticsError(errmsg)
return x
else:
raise TypeError('unsupported type')
if weights is None:
weights = repeat(1, n)
sum_weights = n
else:
if iter(weights) is weights:
weights = list(weights)
if len(weights) != n:
raise StatisticsError('Number of weights does not match data size')
_, sum_weights, _ = _sum(w for w in _fail_neg(weights, errmsg))
try:
data = _fail_neg(data, errmsg)
T, total, count = _sum(w / x if w else 0 for w, x in zip(weights, data))
except ZeroDivisionError:
return 0
if total <= 0:
raise StatisticsError('Weighted sum must be positive')
return _convert(sum_weights / total, T)
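# Editor's note, worked example: averaging two speeds over equal distances,
# harmonic_mean([40, 60]) == 48 (not the arithmetic mean, 50).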
# FIXME: investigate ways to calculate medians without sorting? Quickselect?
def median(data):
"""Return the median (middle value) of numeric data.
When the number of data points is odd, return the middle data point.
When the number of data points is even, the median is interpolated by
taking the average of the two middle values.
"""
data = sorted(data)
n = len(data)
if n == 0:
raise StatisticsError("no median for empty data")
if n % 2 == 1:
return data[n // 2]
else:
i = n // 2
return (data[i - 1] + data[i]) / 2
def median_low(data):
"""Return the low median of numeric data.
When the number of data points is odd, the middle value is returned.
When it is even, the smaller of the two middle values is returned.
"""
data = sorted(data)
n = len(data)
if n == 0:
raise StatisticsError("no median for empty data")
if n % 2 == 1:
return data[n // 2]
else:
return data[n // 2 - 1]
def median_high(data):
"""Return the high median of data.
When the number of data points is odd, the middle value is returned.
When it is even, the larger of the two middle values is returned.
"""
data = sorted(data)
n = len(data)
if n == 0:
raise StatisticsError("no median for empty data")
return data[n // 2]
def median_grouped(data, interval=1):
"""Return the 50th percentile (median) of grouped continuous data.
This calculates the median as the 50th percentile, and should be
used when your data is continuous and grouped.
"""
data = sorted(data)
n = len(data)
if n == 0:
raise StatisticsError("no median for empty data")
elif n == 1:
return data[0]
# Find the value at the midpoint. Remember this corresponds to the
# centre of the class interval.
x = data[n // 2]
for obj in (x, interval):
if isinstance(obj, (str, bytes)):
raise TypeError('expected number but got %r' % obj)
try:
L = x - interval / 2 # The lower limit of the median interval.
except TypeError:
# Mixed type. For now we just coerce to float.
L = float(x) - float(interval) / 2
# Uses bisection search to search for x in data with log(n) time complexity
# Find the position of leftmost occurrence of x in data
l1 = _find_lteq(data, x)
# Find the position of rightmost occurrence of x in data[l1...len(data)]
# Assuming always l1 <= l2
l2 = _find_rteq(data, l1, x)
cf = l1
f = l2 - l1 + 1
return L + interval * (n / 2 - cf) / f
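# Editor's note, worked example: median_grouped([52, 52, 53, 54]) == 52.5,
# treating 53 as the midpoint of the class interval [52.5, 53.5).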
def mode(data):
"""Return the most common data point from discrete or nominal data.
``mode`` assumes discrete data and returns a single value.
If *data* is empty, ``mode`` raises StatisticsError.
"""
pairs = Counter(iter(data)).most_common(1)
try:
return pairs[0][0]
except IndexError:
raise StatisticsError('no mode for empty data') from None
def multimode(data):
"""Return a list of the most frequently occurring values.
Will return more than one result if there are multiple modes
or an empty list if *data* is empty.
"""
counts = Counter(iter(data)).most_common()
maxcount, mode_items = next(groupby(counts, key=itemgetter(1)), (0, []))
return list(map(itemgetter(0), mode_items))
# === Measures of spread ===
def _ss(data, c=None):
"""Return sum of square deviations of sequence data.
"""
if c is not None:
T, total, count = _sum((x-c)**2 for x in data)
return (T, total)
c = mean(data)
T, total, count = _sum((x-c)**2 for x in data)
# The following sum should mathematically equal zero, but due to rounding
# error may not.
U, total2, count2 = _sum((x - c) for x in data)
assert T == U and count == count2
total -= total2 ** 2 / len(data)
assert not total < 0, 'negative sum of square deviations: %f' % total
return (T, total)
def variance(data, xbar=None):
"""Return the sample variance of data.
data should be an iterable of Real-valued numbers, with at least two
values. The optional argument xbar, if given, should be the mean of
the data.
"""
if iter(data) is data:
data = list(data)
n = len(data)
if n < 2:
raise StatisticsError('variance requires at least two data points')
T, ss = _ss(data, xbar)
return _convert(ss / (n - 1), T)
def pvariance(data, mu=None):
"""Return the population variance of ``data``.
data should be a sequence or iterable of Real-valued numbers, with at least one
value. The optional argument mu, if given, should be the mean of
the data.
"""
if iter(data) is data:
data = list(data)
n = len(data)
if n < 1:
raise StatisticsError('pvariance requires at least one data point')
T, ss = _ss(data, mu)
return _convert(ss / n, T)
def stdev(data, xbar=None):
"""Return the square root of the sample variance.
"""
var = variance(data, xbar)
try:
return var.sqrt()
except AttributeError:
return math.sqrt(var)
def pstdev(data, mu=None):
"""Return the square root of the population variance.
"""
var = pvariance(data, mu)
try:
return var.sqrt()
except AttributeError:
return math.sqrt(var)
# === Statistics for relations between two inputs ===
# See https://en.wikipedia.org/wiki/Covariance
# https://en.wikipedia.org/wiki/Pearson_correlation_coefficient
# https://en.wikipedia.org/wiki/Simple_linear_regression
def covariance(x, y, /):
"""Covariance
Return the sample covariance of two inputs *x* and *y*. Covariance
is a measure of the joint variability of two inputs.
"""
n = len(x)
if len(y) != n:
raise StatisticsError('covariance requires that both inputs have same number of data points')
if n < 2:
raise StatisticsError('covariance requires at least two data points')
xbar = fsum(x) / n
ybar = fsum(y) / n
sxy = fsum((xi - xbar) * (yi - ybar) for xi, yi in zip(x, y))
return sxy / (n - 1)
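# Editor's note, worked example: covariance([1, 2, 3], [2, 4, 6]) == 2.0,
# since the centered products sum to 4 and n - 1 == 2.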
|
Algopylib
|
/math/stat.py
|
stat.py
|
from __future__ import annotations
from decimal import Decimal
def det2(matrix: list[list[float]]) -> float:
''' Determinant of a 2x2 matrix. '''
return matrix[0][0] * matrix[1][1] - matrix[1][0] * matrix[0][1]
def det3(matrix: list[list[float]]) -> float:
''' Determinant of a 3x3 matrix. '''
return ( matrix[0][0] * (matrix[1][1] * matrix[2][2] - matrix[2][1] * matrix[1][2]) -
matrix[0][1] * (matrix[1][0] * matrix[2][2] - matrix[1][2] * matrix[2][0]) +
matrix[0][2] * (matrix[1][0] * matrix[2][1] - matrix[1][1] * matrix[2][0]) )
def inverse_of_matrix_2(matrix: list[list[float]]) -> list[list[float]]:
"""
A matrix multiplied with its inverse gives the identity matrix.
This function finds the inverse of a 2x2 matrix.
If the determinant of a matrix is 0, its inverse does not exist.
Sources for fixing inaccurate float arithmetic:
https://stackoverflow.com/questions/6563058/how-do-i-use-accurate-float-arithmetic-in-python
https://docs.python.org/3/library/decimal.html
>>> inverse_of_matrix_2([[2, 5], [2, 0]])
[[0.0, 0.5], [0.2, -0.2]]
>>> inverse_of_matrix_2([[2.5, 5], [1, 2]])
Traceback (most recent call last):
...
ValueError: This matrix has no inverse.
>>> inverse_of_matrix_2([[10, 5], [3, 2.5]])
[[0.25, -0.5], [-0.3, 1.0]]
"""
D = Decimal  # an abbreviation, for conciseness
# Calculate the determinant of the matrix
determinant = D(det2(matrix))
if determinant == 0:
raise ValueError("This matrix has no inverse.")
# Creates a copy of the matrix with swapped positions of the elements
swapped_matrix = [[0.0, 0.0], [0.0, 0.0]]
swapped_matrix[0][0], swapped_matrix[1][1] = matrix[1][1], matrix[0][0]
swapped_matrix[1][0], swapped_matrix[0][1] = -matrix[1][0], -matrix[0][1]
# Calculate the inverse of the matrix
return [[float(D(n) / determinant) or 0.0 for n in row] for row in swapped_matrix]
def inverse_of_matrix_3(matrix: list[list[float]]) -> list[list[float]]:
    ''' Inverse of a 3x3 matrix, computed as the adjugate divided by the determinant. '''
    # Calculate the determinant of the matrix; the inverse exists only if it is non-zero
    # (the original computed invDet from the det3 function object before calling it)
    determinant = det3(matrix)
    if determinant == 0:
        raise ValueError("This matrix has no inverse.")
    invDet = 1 / determinant
    temp = [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]
temp[0][0] = (matrix[1][1] * matrix[2][2] - matrix[2][1] * matrix[1][2]) * invDet
temp[0][1] = (matrix[0][2] * matrix[2][1] - matrix[0][1] * matrix[2][2]) * invDet
temp[0][2] = (matrix[0][1] * matrix[1][2] - matrix[0][2] * matrix[1][1]) * invDet
temp[1][0] = (matrix[1][2] * matrix[2][0] - matrix[1][0] * matrix[2][2]) * invDet
temp[1][1] = (matrix[0][0] * matrix[2][2] - matrix[0][2] * matrix[2][0]) * invDet
temp[1][2] = (matrix[1][0] * matrix[0][2] - matrix[0][0] * matrix[1][2]) * invDet
temp[2][0] = (matrix[1][0] * matrix[2][1] - matrix[2][0] * matrix[1][1]) * invDet
temp[2][1] = (matrix[2][0] * matrix[0][1] - matrix[0][0] * matrix[2][1]) * invDet
temp[2][2] = (matrix[0][0] * matrix[1][1] - matrix[1][0] * matrix[0][1]) * invDet
return temp
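# Editor's illustrative sanity check (not part of the original API): a matrix
# multiplied by its inverse should give the identity matrix.
if __name__ == '__main__':
    m = [[4.0, 7.0], [2.0, 6.0]]
    inv = inverse_of_matrix_2(m)
    for i in range(2):
        print([round(sum(m[i][k] * inv[k][j] for k in range(2)), 10) for j in range(2)])
    # prints [1.0, 0.0] then [0.0, 1.0]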
|
Algopylib
|
/math/matrix.py
|
matrix.py
|
import math
def nth_term_ap(a: float, d: float, n: int) -> float:
'''
Returns the nth term of an Arithmetic Progression.
Parameters:
a: First term of Progression
d: Common difference of the Progression
n: nth term to be calculated
Returns:
The value of the nth term
'''
if n<0:
raise ValueError("n cannot be negative")
return (a + (n - 1) * d)
def nth_term_gp(a: float, r: float, n: int) -> float:
'''
Returns the nth term of a Geometric Progression.
Parameters:
a: First term of Progression
r: Common ratio of the Progression
n: nth term to be calculated
Returns:
The value of the nth term
'''
if n < 0:
    raise ValueError("n cannot be negative")
# note: the original truncated the power to int, which is wrong for ratios like r = 0.5
return a * math.pow(r, n - 1)
def sum_ap(a: float, d: float, n: int) -> float:
'''
Returns the sum upto n terms of an Arithmetic Progression.
Parameters:
a: First term of Progression
d: Common difference of the Progression
n: Value (int) upto which sum is to be calculated
Returns:
    The sum of the first n terms
'''
if n<0:
raise ValueError("n cannot be negative")
return ((n/2) * (2 * a + (n - 1) * d))
def sum_gp(a: float, r: float, n: int) -> float:
'''
Returns the sum upto n terms of a Geometric Progression.
Parameters:
a: First term of Progression
r: Common ratio of the Progression
n: Value (int) upto which sum is to be calculated
Returns:
    The sum of the first n terms
'''
if n<0:
raise ValueError("n cannot be negative")
total = 0
value = a
for i in range(n):
total = total + value
value = value * r
return total
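# Editor's note, worked examples:
#   nth_term_ap(2, 3, 4) == 11      # 2, 5, 8, 11
#   sum_ap(2, 3, 4) == 26.0         # 2 + 5 + 8 + 11
#   nth_term_gp(1, 2, 5) == 16.0    # 1, 2, 4, 8, 16
#   sum_gp(1, 2, 5) == 31           # 1 + 2 + 4 + 8 + 16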
|
Algopylib
|
/math/series.py
|
series.py
|
import math
def distance(x1: float, y1: float, x2: float, y2: float) -> float:
"""
Finds distance between two given points
Parameters:
x1, y1 : The x and y coordinates of first point
x2, y2 : The x and y coordinates of second point
Returns:
    Distance rounded to two decimal places.
"""
distance = math.sqrt( ((x1-x2)**2)+((y1-y2)**2) )
return round(distance,2)
def is_collinear(x1: float, y1: float , x2: float, y2: float, x3: float, y3: float) -> bool:
"""
Finds whether given three points are collinear.
Parameters:
x1, y1 : The x and y coordinates of first point
x2, y2 : The x and y coordinates of second point
x3, y3 : The x and y coordinates of third point
Returns:
True if the points are collinear, otherwise False
"""
a = x1 * (y2 - y3) + x2 * (y3 - y1) + x3 * (y1 - y2)
return a == 0
def eqn_of_line(x1: float, y1: float, x2: float, y2: float) -> str:
"""
Finds equation of a line passing through two given points.
Parameters:
x1, y1 : The x and y coordinates of first point
x2, y2 : The x and y coordinates of second point
Returns:
Equation of the line as a string.
"""
a = y2 - y1
b = x1 - x2
c = a*(x1) + b*(y1)
if b<0:
s=( f"{a}x - {abs(b)}y = {c}")
else:
s=( f"{a}x + {b}y = {c}")
return s
def is_inside_circle(circle_x: float, circle_y: float, rad: float, x: float, y: float) -> bool:
"""
Finds if a given point lies on or inside the circle, or outside the circle.
Parameters:
circle_x, circle_y : Coordinates of center of circle
rad: radius of circle
x, y : Coordinates of test point
Returns:
True if the point lies on or inside the circle, False if outside the circle.
"""
d = (x - circle_x) * (x - circle_x) +(y - circle_y) * (y - circle_y)
return d <= rad * rad
def area_triangle(side1: float, side2: float, side3: float) -> float:
"""
Calculates area of triangle using Heron's Formula when the length of 3 sides are known.
Parameters:
side1, side2, side3 : Lengths of three sides of the triangle.
Returns:
    Area of the triangle rounded to two decimal places.
"""
if side1 < 0 or side2 < 0 or side3 < 0:
raise ValueError("Length of side can only be a non-negative value")
elif side1 + side2 < side3 or side1 + side3 < side2 or side2 + side3 < side1:
raise ValueError("Given three sides do not form a triangle")
semi_per = (side1 + side2 + side3) / 2
area = math.sqrt((semi_per) * (semi_per - side1) * (semi_per - side2) * (semi_per - side3) )
return round(area , 2)
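# Editor's note, worked examples:
#   distance(0, 0, 3, 4) == 5.0
#   is_collinear(0, 0, 1, 1, 2, 2) is True
#   area_triangle(3, 4, 5) == 6.0    # Heron: s = 6, sqrt(6 * 3 * 2 * 1) = 6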
|
Algopylib
|
/math/geometry.py
|
geometry.py
|
# ASP
Functions dedicated to the Algorithm Selection Problem, with a focus on a metalearning approach.
The preprocessing file contains three functions that prepare datasets for use in the later functions:
load_database_aff: loads OpenML datasets.
load_database: loads a batch of CSV files into a list of pandas dataframes.
preprocess_database: converts a list of datasets into a format suitable for extracting metafeatures.
The trainingdata file contains a class and two functions dedicated to extracting metafeatures and rankings of datasets:
Feature_Extractor: function that extracts metafeatures based on distance and correlation, described in [insert reference].
Clustering Evaluation: class that compares and ranks the results of clustering a dataset with seven clustering algorithms from sklearn. It is useful on its own, independent of the other functions in this repository.
rank_database: applies the other functions in this file to all datasets in a database and returns prediction data that can be used to predict the ranking of unknown datasets. It is very slow but only needs to be run once.
Note1: rank_database is useful if you have your own database and plan to expand it in the future. The more coherent a database is, the better the predictions will be. For example, if your database is made of health data for predicting whether a person is susceptible to a heart attack, the resulting data will be good at predicting exactly that for new datasets; but if you try to use that data to predict the ranking of a car-crash dataset, your predictions will be off the mark.
Note2: test_data.csv is precomputed prediction data, made with a mix of real datasets, that can be used to make your predictions and skip this whole process.
The metalearning file contains four functions dedicated to making predictions based on metadata:
split_test_data: auxiliary function that splits prediction data into training and testing sets.
NN_weigth: auxiliary function that uses nearest neighbours to compare the metafeature data of known datasets with an unknown one and builds a weight vector from the comparison (see the sketch below).
predicting_rank: uses NN_weigth and prediction data 'X' to predict the ranking of a dataset with metafeatures 'y'.
get_all_predictions: splits the prediction data n times, predicts the ranking of each instance, and calculates the correlation of each prediction with its true ranking.
Note3: get_all_predictions is useful for testing how well your own prediction database performs and for finding a number of neighbours that works well across many datasets.
Note4: all_scores.csv is the result of applying get_all_predictions to test_data.csv.
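A minimal sketch of the nearest-neighbour weighting idea behind NN_weigth and predicting_rank (the function names, array layout, and inverse-distance weighting below are illustrative assumptions, not the package's actual API):

```python
import numpy as np

def nn_weight_sketch(known_metafeatures, query_metafeatures, k=3):
    # Distance from the unknown dataset's metafeatures to each known dataset.
    dists = np.linalg.norm(known_metafeatures - query_metafeatures, axis=1)
    nearest = np.argsort(dists)[:k]
    # Closer datasets get larger weights (inverse-distance weighting).
    weights = 1.0 / (dists[nearest] + 1e-9)
    return nearest, weights / weights.sum()

def predict_rank_sketch(known_rankings, nearest, weights):
    # Blend the rankings of the k nearest datasets, then re-rank the blend.
    blended = np.average(known_rankings[nearest], axis=0, weights=weights)
    return np.argsort(np.argsort(blended))
```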
|
Algorithm-Selection
|
/Algorithm-Selection-0.0.5.tar.gz/Algorithm-Selection-0.0.5/README.md
|
README.md
|
# Algorithm Abstract Data Types
Finlay's package for Abstract Data Types written for Algorithmics class
## Installation
Run the following command in your terminal:
`pip install AlgorithmADTs`
AlgorithmADTs can now be imported into your python scripts!
I recommend `from AlgorithmADTs import *` to include all functionality, but you can also import from `AlgorithmADTs.AbstractDataTypes` or `AlgorithmADTs.GraphAlgorithms`
## ADTS:
```
Array
create: Integer -> Array
set: Array x Integer x Element -> Array
get: Array x Integer -> Element
```
```
List
create: None -> List
is_empty: List -> Boolean
set: List x Integer x Element -> List
get: List x Integer -> Element
append: List x Element -> List
```
```
Stack
create: None -> Stack
push: Stack x Element -> Stack
pop: Stack -> Stack
is_empty: Stack -> Boolean
head: Stack -> Element
```
```
Queue
create: None -> Queue
enqueue: Queue x Element -> Queue
dequeue: Queue -> Queue
is_empty: Queue -> Boolean
head: Queue -> Element
```
```
PriorityQueue
create: None -> Priority Queue
enqueue: Priority Queue x Element x Integer -> Priority Queue
dequeue: Priority Queue -> Priority Queue
is_empty: Priority Queue -> Boolean
head: Priority Queue -> Element
```
```
Dictionary
create: None -> Dictionary
get: Dictionary x Element -> Element
set: Dictionary x Element x Element -> Dictionary
add: Dictionary x Element x Element -> Dictionary
remove: Dictionary x Element -> Dictionary
has_key: Dictionary x Element -> Boolean
is_empty: Dictionary -> Boolean
```
```
Graph
create: None -> Graph
add_node: Graph x Element -> Graph
add_edge: Graph x Element x Element -> Graph
adjacent: Graph x Element x Element -> Boolean
neighbours: Graph x Element -> List
```
Multiple nodes and edges can now be added at one time with `add_nodes` and `add_edges`, using an iterable
```
WeightedGraph (inherits from Graph)
create: None -> Graph
add_node: Graph x Element -> Graph
add_edge: Graph x Element x Element -> Graph
adjacent: Graph x Element x Element -> Boolean
neighbours: Graph x Element -> List
get_weight: Graph x Element x Element -> integer
```
Note that there is no restriction in these classes that elements be hashable, unlike some Python data types
e.g. a Python `dict` requires keys to be hashable.
The package also defines a variable `infinity`, set equal to `float('inf')`
The following magic methods are supported:
- `__getitem__` and `__setitem__` for classes with a 'get' and 'set' function.
This allows you to call `instance[key]` and `instance[key] = value` (see the sketch after this list).
- `__iter__` for Array and List, which operates as expected. Dictionary iter returns an iterable of keys.
This enables iterating through a class like `for elem in instance`
- `__str__` and `__repr__` are defined for all classes except graphs and allow for classes to be easily viewed through printing
Note that only the head element is visible for a stack or queue, so it is the only information that can be returned by these methods
- Numerical magic methods (e.g. `__add__`) are defined for matrices
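A minimal usage sketch of the indexing and iteration support (assuming the constructors and method names follow the ADT signatures above):

```python
from AlgorithmADTs import *

d = Dictionary()
d["apple"] = 3        # __setitem__ wraps the 'set' operation
print(d["apple"])     # __getitem__ wraps the 'get' operation -> 3

a = Array(5)
a[0] = 42
for elem in a:        # __iter__ walks the array's elements
    print(elem)
```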
## Graph Algorithms
Currently, the following graph algorithms are defined:
- Prim's algorithm for computing the Minimal Spanning Tree of a weighted, undirected graph
- Dijkstra's algorithm for finding the single source shortest path in a weighted graph
- The Bellman-Ford algorithm which extends the functionality of Dijkstra's algorithm to allow for negative weights
- The two variants of the Floyd-Warshall algorithm, to calculate the shortest paths between all pairs of nodes and the transitive closure of an unweighted graph
- The PageRank algorithm for determining the relative importance of nodes in an unweighted graph
## Version things
To implement:
- Optional hashing for graphs?
- Search methods like DFS and BFS
|
AlgorithmADTs
|
/algorithmadts-0.2.1.tar.gz/algorithmadts-0.2.1/README.md
|
README.md
|
Audio test libs to compute audio quality and 3A performance by objective metrics.
PCM and WAV input files are allowed, and different sample rates are supported (invalid params are simply corrected to valid ones).
# How to install?
# Install with pip:
simply use pip to install this toolkit:
"pip install algorithmLib"
# Build and Install with git:
first step:
git clone https://g.hz.netease.com/majianli/audiotestalgorithm
second step:
simply run move2sys.prefix.bat before building the project
# How to use?
#just see ./demos/
def compute_audio_quality(metrics,testFile=None,refFile=None,cleFile=None,samplerate=16000,bitwidth=2,channel=1,refOffset=0,testOffset=0,maxComNLevel=-48.0,speechPauseLevel=-35.0,audioType=0,
                          aecStartPoint=0,aecTargetType=0,aecScenario=0,rmsCalsection=None):
"""
:param metrics: G160/P563/POLQA/PESQ/STOI/STI/PEAQ/SDR/SII/LOUDNESS,
# G160 no sample-rate limit; WAV/PCM input; three input files: clean, ref, test; no duration limit
# P563 8000 Hz (only the 8 kHz spectrum is calculated); WAV/PCM input; single input file; duration < 20s
# POLQA narrowband mode 8 kHz, super-wideband mode 48 kHz; WAV/PCM input; 2 input files: ref, test; duration < 20s
# PESQ narrowband mode 8 kHz, wideband mode 16 kHz; WAV/PCM input; 2 input files: ref, test; duration < 20s
# STOI no sample-rate limit; 2 input files: ref, test; no duration limit
# STI >8 kHz (only the 8 kHz spectrum is calculated); WAV/PCM input; 2 input files: ref, test; duration > 20s
# PEAQ no sample-rate limit; WAV/PCM input; 2 input files: ref, test; no duration limit
# SDR no sample-rate limit; WAV/PCM input; 2 input files: ref, test; no duration limit
# MATCH no sample-rate limit; WAV/PCM input; 3 input files: ref, test, out; no duration limit
# MUSIC no sample-rate limit; WAV/PCM input; 2 input files: ref, test; no duration limit
# TRANSIENT no sample-rate limit; WAV/PCM input; 3 input files: cle, noise, test; no duration limit
# GAINTABLE no sample-rate limit; WAV/PCM input; 2 input files: ref, test; specific files
# ATTACKRELEASE no sample-rate limit; WAV/PCM input; 2 input files: ref, test; specific files
# MUSICSTA no sample-rate limit; WAV/PCM input; 2 input files: ref, test; no duration limit
# AGCDELAY no sample-rate limit; WAV/PCM input; 2 input files: ref, test; no duration limit
# MATCHAEC no sample-rate limit; WAV/PCM input; 3 input files: ref, mic, test; no duration limit
# ELRE no sample-rate limit; WAV/PCM input; 3 input files: mic, ref, test; no duration limit
# SLIENCE no sample-rate limit; WAV/PCM/MP4 input; 1 input file: test; no duration limit
# FORMAT no sample-rate limit; WAV/MP4 input; 1 input file: test; no duration limit
# AECMOS no sample-rate limit; WAV/PCM input; 3 input files: mic, ref, test; no duration limit
# AIMOS no sample-rate limit; WAV/PCM input; 1 input file: test; no duration limit
# TRMS no sample-rate limit; WAV/PCM input; 1 input file: test; no duration limit
# ARMS no sample-rate limit; WAV/PCM input; 1 input file: test; no duration limit
# NOISE no sample-rate limit; WAV/PCM input; 2 input files: ref, test; no duration limit
Different metrics have different parameter requirements; if the params you pass are invalid, they will be corrected to valid ones while calculating.
:param testFile: the file under test, required
:param refFile: the reference file, required by full-reference (FR) metrics such as POLQA/PESQ/PEAQ
:param cleFile: clean file, needed by G160
:param noiseFile: noise file, optional, needed for TRANSIENT noise
:param outFile: output file, optional, used by MATCH (MATCH SIG)
:param samplerate: sample rate, optional, for PCM files, default = 16000
:param bitwidth: bit width, optional, for PCM files, default = 2
:param channel: number of channels, optional, for PCM files, default = 1
:param refOffset: offset for the ref file, optional
:param testOffset: offset for the test file, optional
:param maxComNLevel: maximum comfort-noise level, for G160
:param speechPauseLevel: noise level during speech pauses, for G160
:param audioType: 0: speech, 1: music; for MATCH/GAINTABLE
:param aecStartPoint: for AECMOS, the second at which calculation starts
:param aecTargetType: 0: Chinese, 1: English, 2: Single Digit, 3: Music; for MATCHAEC/ELRE
:param aecScenario: for AEC MOS, 0: 'doubletalk_with_movement', 1: 'doubletalk', 2: 'farend_singletalk_with_movement', 3: 'farend_singletalk', 4: 'nearend_singletalk'
:param rmsCalsection: section over which RMS is computed, for TRMS/ARMS
:return:
"""
# PESQ example
src = "a.pcm"
test = "b.pcm"
score = compute_audio_quality('PESQ',testFile=test,refFile=src,samplerate=16000)
or
src = "a.wav"
test = "b.wav"
score = compute_audio_quality('PESQ',testFile=test,refFile=src)
# G160 example
src = "a.wav"
test = "b.wav"
cle = "c.wav"
tnlr,nplr,snri,dsn = compute_audio_quality("G160",testFile=test,refFile=src,cleFile=cle)
or
src = "a.pcm"
test = "b.pcm"
cle = "c.pcm"
tnlr,nplr,snri,dsn = compute_audio_quality("G160",testFile=test,refFile=src,cleFile=cle,samplerate=48000)
# P563 example
test = "a.wav"
Mos,SpeechLevel,Snr,NoiseLevel = compute_audio_quality('P563',testFile=test)
or
test = "a.pcm"
Mos,SpeechLevel,Snr,NoiseLevel = compute_audio_quality('P563',testFile=test,samplerate=32000)
|
AlgorithmLib
|
/AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/README.md
|
README.md
|
from operator import methodcaller
from computeAudioQuality.mainProcess import computeAudioQuality
from ctypes import *
def compute_audio_quality(metrics, testFile=None, refFile=None,micFile=None,cleFile=None, aecCaliFile=None,outFile=None, noiseFile=None,
samplerate=16000, bitwidth=2, channel=1, refOffset=0, testOffset=0, maxComNLevel=-48.0,
speechPauseLevel=-35.0,audioType=0,aecStartPoint=0,aecTargetType=0,aecScenario=0,rmsCalsection=None,polqaMode=0,pitchLogMode=1,fineDelaySection=None):
"""
:param metrics: G160/P563/POLQA/PESQ/STOI/STI/PEAQ/SDR/SII/LOUDNESS/MUSIC/MATCH/
TRANSIENT/GAINTABLE/ATTACKRELEASE/MUSICSTA/AGCDELAY/MATCHAEC/
ELRE/SLIENCE/FORMAT/AECMOS/AIMOS/TRMS/ARMS/NOISE/CLIP/DELAY/ECHO/SPEC/PITCH/EQ, required
# G160 no sample-rate limit; WAV/PCM input; three inputs: clean, ref, test; no duration limit
# P563 8000 Hz (other sample rates are forcibly converted to 8 kHz); WAV/PCM input; single input: test; duration < 20s
# POLQA narrowband mode 8 kHz, super-wideband mode 48 kHz; WAV/PCM input; two inputs: ref, test; duration < 20s
# PESQ narrowband mode 8 kHz, wideband mode 16 kHz; WAV/PCM input; two inputs: ref, test; duration < 20s
# STOI no sample-rate limit; two inputs: ref, test; no duration limit
# STI >8 kHz (the 8 kHz spectrum is actually computed); WAV/PCM input; two inputs: ref, test; duration > 20s
# PEAQ no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# SDR no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# MATCH no sample-rate limit; WAV/PCM input; three inputs: ref, test, out; no duration limit
# MUSIC no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# TRANSIENT no sample-rate limit; WAV/PCM input; three inputs: cle, noise, test; no duration limit
# GAINTABLE no sample-rate limit; WAV/PCM input; two inputs: ref, test; fixed input signal
# ATTACKRELEASE no sample-rate limit; WAV/PCM input; two inputs: ref, test; fixed input signal
# MUSICSTA no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# AGCDELAY no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# MATCHAEC no sample-rate limit; WAV/PCM input; three inputs: ref, mic, test; no duration limit
# ELRE no sample-rate limit; WAV/PCM input; three inputs: mic, ref, test; no duration limit
# SLIENCE no sample-rate limit; WAV/PCM/MP4 input; single input: test; no duration limit
# FORMAT no sample-rate limit; WAV/MP4 input; single input: test; no duration limit
# AECMOS no sample-rate limit; WAV/PCM input; three inputs: mic, ref, test; no duration limit
# AIMOS no sample-rate limit; WAV/PCM input; single input: test; no duration limit
# TRMS no sample-rate limit; WAV/PCM input; single input: test; no duration limit
# ARMS no sample-rate limit; WAV/PCM input; single input: test; no duration limit
# NOISE no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# CLIP no sample-rate limit; WAV/PCM input; single input: test; no duration limit
# DELAY no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# ECHO no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# SPEC no sample-rate limit; WAV/PCM input; single input: test; no duration limit
# PITCH no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# EQ no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
Different metrics have different sample-rate requirements; if an input file does not meet a metric's requirements, it is automatically resampled into the valid range.
:param testFile: file under test, required
:param refFile: reference file, optional; required by full-reference metrics such as POLQA/PESQ/PEAQ
:param micFile: mic input, optional; required by the echo metrics MATCHAEC/ELRE/AECMOS
:param cleFile: clean speech file, optional; needed by G160 and TRANSIENT
:param noiseFile: noise file, optional; needed for the transient-noise SNR calculation
:param aecCaliFile: calibration file used for AEC alignment, MATCHAEC only
:param outFile: output file, optional; used for aligned output
:param samplerate: sample rate, optional, needed for PCM files, default = 16000
:param bitwidth: bit width, optional, needed for PCM files, default = 2
:param channel: number of channels, optional, needed for PCM files, default = 1
:param refOffset: sample offset of the ref file, optional; needed by G160
:param testOffset: sample offset of the test file, optional; needed by G160
:param maxComNLevel: maximum comfort-noise level for G160 test files
:param speechPauseLevel: noise level during speech pauses for G160 test files
:param audioType: input audio mode, 0: speech, 1: music; needed by MATCH/GAINTABLE
:param aecStartPoint: for AECMOS, the second at which calculation starts
:param aecTargetType: 0: Chinese, 1: English, 2: Single Digit, 3: Music; for MATCHAEC/ELRE
:param aecScenario: AEC MOS only, 0: 'doubletalk_with_movement', 1: 'doubletalk', 2: 'farend_singletalk_with_movement', 3: 'farend_singletalk', 4: 'nearend_singletalk'
:param rmsCalsection: section over which RMS is computed, needed by TRMS and ARMS, in seconds, e.g. [1, 20]
:param polqaMode: POLQA mode, 0: default mode; 1: ideal mode, which excludes the influence of quiet audio by calibrating the level to the ideal -26 dB
:param pitchLogMode: pitch mode, 0: linear, for the SetLocalVoicePitch interface; 1: logarithmic, for the SetAudioMixingPitch interface; default is 1
:param fineDelaySection: fine delay calculation (MATCH3) requires manually marking the speech-block positions, e.g. three blocks: speech_section=[[2.423,4.846],[5.577,7.411],[8,10.303]]
:return:
"""
paraDicts = {
'metrics':metrics,
'testFile':testFile,
'refFile':refFile,
'micFile':micFile,
'cleFile':cleFile,
'noiseFile':noiseFile,
'aecCaliFile':aecCaliFile,
'outFile':outFile,
'samplerate':samplerate,
'bitwidth':bitwidth,
'channel':channel,
'refOffset':refOffset,
'testOffset':testOffset,
'maxComNLevel':maxComNLevel,
"speechPauseLevel":speechPauseLevel,
"audioType":audioType,
"aecStartPoint":aecStartPoint,
"aecTargetType":aecTargetType,
'aecScenario':aecScenario,
'rmsCalsection':rmsCalsection,
'polqaMode': polqaMode,
"pitchLogMode": pitchLogMode,
"fineDelaySection": fineDelaySection
}
comAuQUA = computeAudioQuality(**paraDicts)
return methodcaller(metrics)(comAuQUA)
if __name__ == '__main__':
# speech = r'D:\AutoWork\audiotestalgorithm\algorithmLib\SNR_ESTIMATION\speech.wav'
# music = r'D:\AutoWork\audiotestalgorithm\algorithmLib\SNR_ESTIMATION\music_rap.wav'
# transi = r'D:\AutoWork\audiotestalgorithm\algorithmLib\SNR_ESTIMATION\transientNoise.wav'
# test = r'D:\AutoWork\audiotestalgorithm\algorithmLib\SNR_ESTIMATION\test.wav'
# res = compute_audio_quality('MUSIC',refFile=speech,testFile=music)
#
# print(res)
#
# res = compute_audio_quality('TRANSIENT',cleFile=speech,noiseFile=transi,testFile=test)
# print(res)
#
# res = compute_audio_quality('MATCH',refFile=speech,testFile=test,outFile='123.wav')
# print(res)
#print(compute_audio_quality('G160', testFile=src,refFile=src,samplerate=16000))
#print(match_sig(refFile='speech.wav', targetFile='test.wav', outFile='outfile.wav'))
import time
for a in range(200):
time.sleep(1)
src = r'E:\audioalgorithm\audiotestalgorithm\demos\02_p563_demo\cleDstFile.wav'
print(compute_audio_quality('P563',testFile=src))
exit(0)
file = r'C:\Users\vcloud_avl\Downloads\agc_eva\speech_attackrelease.wav'
test = r'C:\Users\vcloud_avl\Downloads\agc_eva\test_attackrelease.wav'
print(compute_audio_quality('ATTACKRELEASE',refFile=file,testFile=test))
file = r'C:\Users\vcloud_avl\Downloads\agc_eva\speech_gaintable.wav'
test = r'C:\Users\vcloud_avl\Downloads\agc_eva\test.wav'
lim,gain_table,DR = compute_audio_quality('GAINTABLE',refFile=file,testFile=test,audioType=1)
print(lim,gain_table[0],DR[2])
for a in gain_table:
print(a)
for a in DR:
print(a)
file = r'C:\Users\vcloud_avl\Downloads\agc_eva\music_stability_.wav'
test = r'C:\Users\vcloud_avl\Downloads\agc_eva\test_music_stability.wav'
res = compute_audio_quality('MUSICSTA',refFile=file, testFile=test)
for a in res:
print(a)
file = r'C:\Users\vcloud_avl\Downloads\agc_eva\speech_gaintable.wav'
test = r'C:\Users\vcloud_avl\Downloads\agc_eva\test.wav'
delay = compute_audio_quality('AGCDELAY',refFile=file,testFile=test)
print(delay)
compute_audio_quality('MATCH',refFile=file,testFile=test,outFile='out.wav',audioType=1)
|
AlgorithmLib
|
/AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/main.py
|
main.py
|
from .formatConvert.wav_pcm import wav2pcm
from .G160.G160 import cal_g160
from .P563.P563 import cal_563_mos
from .PESQ.PESQ import *
from .POLQA.polqa_client import polqa_client_test
from .SDR.SDR import cal_sdr
from .STI.cal_sti import cal_sti
from .STOI.STOI import *
from .resample.resampler import resample,restruct
from .timeAligment.time_align import cal_fine_delay
from .commFunction import get_data_array,get_file_path,get_rms,convert_error_header,make_out_file
from .PCC.Pearson_CC import cal_PCC,get_max_cc_by_dll
from .Noise_Suppression.noiseFuction import cal_noise_Supp,cal_noise_Supp_by_ref
from .SNR_ESTIMATION.MATCH_SIG import match_sig
from .SNR_ESTIMATION.SNR_MUSIC import cal_snr_music
from .SNR_ESTIMATION.SNR_TRANSIENT import cal_snr_transient
from .AGC_EVALUATION.CAL_GAIN_TABLE import cal_gain_table
from .AGC_EVALUATION.CAL_ATTACK_RELEASE import cal_attack_release
from .AGC_EVALUATION.CAL_MUSIC_STABILITY import cal_music_stablility
from .AGC_EVALUATION.CAL_DELAY import cal_DELAY
from .AEC_EVALUATION.MATCH_AEC import MATCH_AEC
from .AEC_EVALUATION.ERLE_ETSIMATION import cal_erle
from .FUNCTION.audioFunction import isSlience,audioFormat,get_effective_spectral,cal_pitch,cal_EQ
from .AEC_MOS.aecmos import cal_aec_mos
from .MOS_INFER.run_predict import cal_mos_infer
from .Noise_Suppression.noiseFuction import cal_noise_Supp
from .CLIPPING_DETECTION.audio_clip_detection import cal_clip_index
from .AEC_EVALUATION.FR_ECHO_DETECT import cal_fullref_echo
from .VAD_NN.hubconf import silero_vad
from operator import methodcaller
from .computeAudioQuality.mainProcess import computeAudioQuality
from ctypes import *
def compute_audio_quality(metrics,testFile=None,refFile=None,micFile=None,cleFile=None,aecCaliFile=None,outFile=None,noiseFile=None,samplerate=16000,
bitwidth=2,channel=1,refOffset=0,testOffset=0,maxComNLevel =-48.0,speechPauseLevel=-35.0,audioType=0,
aecStartPoint=0,aecTargetType=0,aecScenario=0,rmsCalsection=None,polqaMode=0,pitchLogMode=1,fineDelaySection=None):
"""
:param metrics: G160/P563/POLQA/PESQ/STOI/STI/PEAQ/SDR/SII/LOUDNESS/MUSIC/MATCH/
TRANSIENT/GAINTABLE/ATTACKRELEASE/MUSICSTA/AGCDELAY/MATCHAEC/
ERLE/SLIENCE/FORMAT/AECMOS/AIMOS/TRMS/ARMS/NOISE/CLIP/DELAY/ECHO/SPEC/PITCH/EQ, required
# G160 no sample-rate limit; WAV/PCM input; three inputs: clean, ref, test; no duration limit
# P563 8000 Hz (other sample rates are forcibly converted to 8 kHz); WAV/PCM input; single input: test; duration < 20s
# POLQA narrowband mode 8 kHz, super-wideband mode 48 kHz; WAV/PCM input; two inputs: ref, test; duration < 20s
# PESQ narrowband mode 8 kHz, wideband mode 16 kHz; WAV/PCM input; two inputs: ref, test; duration < 20s
# STOI no sample-rate limit; two inputs: ref, test; no duration limit
# STI >8 kHz (the 8 kHz spectrum is actually computed); WAV/PCM input; two inputs: ref, test; duration > 20s
# PEAQ no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# SDR no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# MATCH no sample-rate limit; WAV/PCM input; three inputs: ref, test, out; no duration limit
# MUSIC no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# TRANSIENT no sample-rate limit; WAV/PCM input; three inputs: cle, noise, test; no duration limit
# GAINTABLE no sample-rate limit; WAV/PCM input; two inputs: ref, test; fixed input signal
# ATTACKRELEASE no sample-rate limit; WAV/PCM input; two inputs: ref, test; fixed input signal
# MUSICSTA no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# AGCDELAY no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# MATCHAEC no sample-rate limit; WAV/PCM input; three inputs: ref, mic, test; no duration limit
# ELRE no sample-rate limit; WAV/PCM input; three inputs: mic, ref, test; no duration limit
# SLIENCE no sample-rate limit; WAV/PCM/MP4 input; single input: test; no duration limit
# FORMAT no sample-rate limit; WAV/MP4 input; single input: test; no duration limit
# AECMOS no sample-rate limit; WAV/PCM input; three inputs: mic, ref, test; no duration limit
# AIMOS no sample-rate limit; WAV/PCM input; single input: test; no duration limit
# TRMS no sample-rate limit; WAV/PCM input; single input: test; no duration limit
# ARMS no sample-rate limit; WAV/PCM input; single input: test; no duration limit
# NOISE no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# CLIP no sample-rate limit; WAV/PCM input; single input: test; no duration limit
# DELAY no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# ECHO no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# SPEC no sample-rate limit; WAV/PCM input; single input: test; no duration limit
# PITCH no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# EQ no sample-rate limit; WAV/PCM input; two inputs: ref, test; no duration limit
# MATCH2 no sample-rate limit; WAV/PCM input; three inputs: ref, test, out; no duration limit
# MATCH3 no sample-rate limit; WAV/PCM input; three inputs: ref, test, out; no duration limit
Different metrics have different sample-rate requirements; if an input file does not meet a metric's requirements, it is automatically resampled into the valid range.
:param testFile: file under test, required
:param refFile: reference file, optional; required by full-reference metrics such as POLQA/PESQ/PEAQ
:param micFile: mic input, optional; required by the echo metrics MATCHAEC/ELRE/AECMOS
:param cleFile: clean speech file, optional; needed by G160 and TRANSIENT
:param noiseFile: noise file, optional; needed for the transient-noise SNR calculation
:param aecCaliFile: calibration file used for AEC alignment, MATCHAEC only
:param outFile: output file, optional; used for aligned output
:param samplerate: sample rate, optional, needed for PCM files, default = 16000
:param bitwidth: bit width, optional, needed for PCM files, default = 2
:param channel: number of channels, optional, needed for PCM files, default = 1
:param refOffset: sample offset of the ref file, optional; needed by G160
:param testOffset: sample offset of the test file, optional; needed by G160
:param maxComNLevel: maximum comfort-noise level for G160 test files
:param speechPauseLevel: noise level during speech pauses for G160 test files
:param audioType: input audio mode, 0: speech, 1: music; needed by MATCH/GAINTABLE
:param aecStartPoint: for AECMOS, the second at which calculation starts
:param aecTargetType: 0: Chinese, 1: English, 2: Single Digit, 3: Music; for MATCHAEC/ELRE
:param aecScenario: AEC MOS only, 0: 'doubletalk_with_movement', 1: 'doubletalk', 2: 'farend_singletalk_with_movement', 3: 'farend_singletalk', 4: 'nearend_singletalk'
:param rmsCalsection: section over which RMS is computed, needed by TRMS and ARMS, in seconds, e.g. [1, 20]
:param polqaMode: POLQA mode, 0: default mode; 1: ideal mode, which excludes the influence of quiet audio by calibrating the level to the ideal -26 dB
:param pitchLogMode: pitch mode, 0: linear, for the SetLocalVoicePitch interface; 1: logarithmic, for the SetAudioMixingPitch interface; default is 1
:param fineDelaySection: fine delay calculation (MATCH3) requires manually marking the speech-block positions, e.g. three blocks: speech_section=[[2.423,4.846],[5.577,7.411],[8,10.303]]
:return:
"""
paraDicts = {
'metrics':metrics,
'testFile':testFile,
'refFile':refFile,
'micFile':micFile,
'cleFile':cleFile,
'noiseFile':noiseFile,
'aecCaliFile':aecCaliFile,
'outFile':outFile,
'samplerate':samplerate,
'bitwidth':bitwidth,
'channel':channel,
'refOffset':refOffset,
'testOffset':testOffset,
'maxComNLevel':maxComNLevel,
"speechPauseLevel":speechPauseLevel,
"audioType":audioType,
"aecStartPoint":aecStartPoint,
"aecTargetType":aecTargetType,
'aecScenario':aecScenario,
'rmsCalsection':rmsCalsection,
'polqaMode':polqaMode,
"pitchLogMode":pitchLogMode,
"fineDelaySection":fineDelaySection
}
comAuQUA = computeAudioQuality(**paraDicts)
return methodcaller(metrics)(comAuQUA)
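# --- Hedged usage sketch (added; not part of the original file) ---
# A minimal example of the entry point above, assuming 'ref.wav' and
# 'test.wav' are hypothetical 16 kHz mono files in the working directory:
#
#     score = compute_audio_quality('PESQ', testFile='test.wav',
#                                   refFile='ref.wav', samplerate=16000)
#     print('PESQ:', score)
#
# Per the docstring, PESQ needs both ref and test and clips longer than 20 s
# are rejected; single-ended metrics such as P563 need only testFile.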
# ==== End of file: /AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/__init__.py ====
import sys
sys.path.append('../')
import os,time
import socket
import paramiko
from stat import S_ISDIR as isdir
import copy
from ctypes import *
import wave
import numpy as np
import math
import librosa
class emxArray_real_T(Structure):
    # ctypes mirror of the emxArray_real_T struct produced by MATLAB Coder
    _fields_ = [
        ("pdata", POINTER(c_double)),    # pointer to the sample data
        ("psize", POINTER(c_int)),       # size of each dimension
        ("allocSize", c_int),            # number of elements allocated
        ("NumDimensions", c_int),        # number of dimensions
        ("canFreeData", c_uint),         # whether the library may free pdata
    ]
def convert_error_header(wavfile,channels=1, bits=16, sample_rate=16000):
suffix = os.path.splitext(wavfile)[-1]
assert suffix == '.wav'
newFileName = wavfile[:-4] + '_convertHeader.wav'
pcmf = open(wavfile, 'rb')
pcmdata = pcmf.read()
pcmf.close()
if bits % 8 != 0:
raise ValueError("bits % 8 must == 0. now bits:" + str(bits))
wavfile = wave.open(newFileName, 'wb')
wavfile.setnchannels(channels)
wavfile.setsampwidth(bits // 8)
wavfile.setframerate(sample_rate)
wavfile.writeframes(pcmdata)
wavfile.close()
time.sleep(1)
#os.remove(pcm_file)
return newFileName
def get_data_array(filename):
"""
"""
f = wave.open(filename, "rb")
    # Read the format info: getparams() returns all of the WAV file's format
    # information as a tuple: (nchannels, sampwidth in bytes, framerate,
    # nframes, comptype, compname). The wave module only supports uncompressed
    # data, so the last two fields can be ignored.
params = f.getparams()
nchannels, sampwidth, framerate, nframes = params[:4]
    # Read the waveform data; readframes() takes the number of frames to read
str_data = f.readframes(nframes)
f.close()
return np.frombuffer(str_data, dtype=np.int16),framerate,nchannels
def get_data_array_double(filename):
"""
"""
f = wave.open(filename, "rb")
    # Read the format info (see get_data_array for the meaning of the fields)
params = f.getparams()
nchannels, sampwidth, framerate, nframes = params[:4]
    # Read the waveform data; readframes() takes the number of frames to read
str_data = f.readframes(nframes)
f.close()
return np.frombuffer(str_data, dtype=np.int16).astype(np.double),framerate,nchannels
def get_data_of_ctypes_(inWaveFile=None,int2float=False):
wavf = wave.open(inWaveFile, 'rb')
refChannel,refsamWidth,refsamplerate,refframeCount = wavf.getnchannels(),wavf.getsampwidth(),wavf.getframerate(),wavf.getnframes()
    if (refChannel,refsamWidth) != (1,2):
        raise TypeError('Only mono 16-bit input files are supported!')
pcmdata = wavf.readframes(refframeCount)
ref = np.frombuffer(pcmdata,dtype=np.int16)
ref = ref.astype(np.float64)
if int2float:
ref = ref/32768
datastruct = emxArray_real_T()
datastruct.pdata = (c_double * refframeCount)(*ref)
datastruct.psize = (c_int * 1)(*[refframeCount])
wavf.close()
return datastruct,refsamplerate,refframeCount
def get_data_of_ctypes_from_datablock(inData=None,datalen=96000,int2float=False):
ref = np.frombuffer(inData,dtype=np.int16)
ref = ref.astype(np.float64)
if int2float:
ref = ref/32768
datastruct = emxArray_real_T()
datastruct.pdata = (c_double * datalen)(*ref)
datastruct.psize = (c_int * 1)(*[datalen])
return datastruct
def get_none_data_of_ctypes_(dataLength=0):
data = np.array([0.0 for _ in range(dataLength)])
data = data.astype(np.float64)
outStruct = emxArray_real_T()
#outStruct = create_string_buffer(20)
outStruct.pdata = (c_double * dataLength)(*data)
outStruct.psize = (c_int * 1)(*[dataLength])
outStruct.allocSize = dataLength
outStruct.NumDimensions = 1
outStruct.canFreeData = 1
return outStruct
def write_ctypes_data_2_file_(outFile,outStruct,refsamplerate):
outf = wave.open(outFile, 'wb')
outf.setnchannels(1)
outf.setsampwidth(2)
outf.setframerate(refsamplerate)
    # Convert the sample values to binary data and write them to the file
outlist = []
for a in range(outStruct.psize[0]):
outlist.append(int(outStruct.pdata[a]))
outarray = np.array(outlist)
outarray = outarray.astype(np.int16)
outf.writeframes(bytes(outarray))
outf.close()
constMosResult = {'delay':'No Result','mos':'-0.0','Speech Level Gain':'','Noise Level Gain':''}
class commondata():
def __init__(self):
self.mosResult = copy.deepcopy(constMosResult)
self.HOST = '10.219.36.124'
self.machost = '10.219.36.124' #'10.242.167.159'
self.username = 'netease'
self.password = 'Nora3390'
self.PORT = 2159
self.sftpPort = 22
@staticmethod
def get_data():
return {"type": "command",
"module": "clientA",
"method": "requestA",
"samplerate":16000,
"token": "",
"job":None,
"srcFile":'',
"testFile":'',
"result":{},
"err":"No error"}
global_result = commondata()
def log_time():
time_tup = time.localtime(time.time())
# format_time = '%Y-%m-%d_%a_%H-%M-%S'
format_time = '%Y-%m-%d-%H-%M-%S'
cur_time = time.strftime(format_time, time_tup)
return cur_time
def getip():
    # Create the socket outside the try block so s is always defined in finally
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(('8.8.8.8', 80))
        vqtip = s.getsockname()[0]
    finally:
        s.close()
    return vqtip
def exec_shell_command(cmd):
    # NOTE: serverIP, port, username and password are not defined in this file;
    # they appear to be expected as module-level configuration set elsewhere.
    ssh = paramiko.SSHClient()
    # Automatically add the target host to the known_hosts file
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    # Connect to the server
    ssh.connect(hostname=serverIP, port=port, username=username, password=password, allow_agent=False,
                look_for_keys=False)
stdin, stdout, stderr = ssh.exec_command(cmd, get_pty=True)
result = stdout.read()
ssh.close()
return result
def sftp_connect(username,password,host,port=22):
client = None
sftp = None
try:
client = paramiko.Transport((host,port))
except Exception as error:
print(error)
else:
try:
client.connect(username=username, password=password)
except Exception as error:
print(error)
else:
sftp = paramiko.SFTPClient.from_transport(client)
return client,sftp
def sftp_disconnect(client):
try:
client.close()
except Exception as error:
print(error)
def _check_local(local):
if not os.path.exists(local):
try:
os.mkdir(local)
except IOError as err:
print(err)
def sftp_get(sftp, remote, local):
    # Check whether the remote file exists
    try:
        result = sftp.stat(remote)
    except IOError as err:
        error = '[ERROR %s] %s: %s' % (err.errno, os.path.basename(os.path.normpath(remote)), err.strerror)
        print(error)
    else:
        # Check whether the remote path is a directory
        if isdir(result.st_mode):
            dirname = os.path.basename(os.path.normpath(remote))
            local = os.path.join(local, dirname)
            _check_local(local)
            for file in sftp.listdir(remote):
                sub_remote = os.path.join(remote, file)
                sub_remote = sub_remote.replace('\\', '/')
                sftp_get(sftp, sub_remote, local)
        else:
            # Copy the file
            if os.path.isdir(local):
                local = os.path.join(local, os.path.basename(remote))
            try:
                sftp.get(remote, local)
            except IOError as err:
                print(err)
            else:
                print('[get]', local, '<==', remote)
def sftp_put(sftp, local, remote):
    # Check whether a path exists
    def _is_exists(path, function):
        path = path.replace('\\', '/')
        try:
            function(path)
        except Exception as error:
            return False
        else:
            return True
    # Copy files
    def _copy(sftp, local, remote):
        # If remote is a directory, append the final component of the local path to it
        if _is_exists(remote, function=sftp.chdir):
            filename = os.path.basename(os.path.normpath(local))
            remote = os.path.join(remote, filename).replace('\\', '/')
        # If local is a directory
        if os.path.isdir(local):
            # Create the corresponding directory on the remote side
            _is_exists(remote, function=sftp.mkdir)
            # Walk through local
            for file in os.listdir(local):
                # Build the file's full path
                localfile = os.path.join(local, file).replace('\\', '/')
                # Recurse into _copy()
                _copy(sftp=sftp, local=localfile, remote=remote)
        # If local is a single file
        if os.path.isfile(local):
            try:
                sftp.put(local, remote)
            except Exception as error:
                print(error)
                print('[put]', local, '==>', remote, 'FAILED')
            else:
                print('[put]', local, '==>', remote, 'SUCCEEDED')
    # Check local
    if not _is_exists(local, function=os.stat):
        print("'" + local + "': No such file or directory in local")
        return False
    # Check remote's parent directory
    remote_parent = os.path.dirname(os.path.normpath(remote))
    if not _is_exists(remote_parent, function=sftp.chdir):
        print("'" + remote + "': No such file or directory in remote")
        return False
    # Copy
    _copy(sftp=sftp, local=local, remote=remote)
def get_file_path(root_path,file_list,dir_list):
    # Collect all file and directory names under root_path
    dir_or_files = os.listdir(root_path)
    for dir_file in dir_or_files:
        # Build the full path of the directory or file
        dir_file_path = os.path.join(root_path,dir_file)
        # Decide whether the path is a directory or a file
        if os.path.isdir(dir_file_path):
            dir_list.append(dir_file_path)
            # Recurse to collect all file and directory paths
            get_file_path(dir_file_path,file_list,dir_list)
        else:
            file_list.append(dir_file_path)
def project_root_path(project_name=None):
    """
    Get the root path of the current project.
    :param project_name:
    :return: the root path
    """
    PROJECT_NAME = 'audiotestalgorithm' if project_name is None else project_name
    project_path = os.path.abspath(os.path.dirname(__file__))
    # NOTE: assumes Windows-style '\\' separators in the path
    root_path = project_path[:project_path.find("{}\\".format(PROJECT_NAME)) + len("{}\\".format(PROJECT_NAME))]
    # print('Current project name: {}\r\nCurrent project root path: {}'.format(PROJECT_NAME, root_path))
    return root_path
def make_out_file(tarFile,data,fs,channel):
"""
"""
outData = data.astype(np.int16)
wavfile = wave.open(tarFile, 'wb')
wavfile.setnchannels(channel)
wavfile.setsampwidth(2)
wavfile.setframerate(fs)
wavfile.writeframes(outData.tobytes())
wavfile.close()
def get_ave_rms(data):
    '''
    Average of the per-frame RMS values over 480-sample frames.
    Parameters
    ----------
    data
    Returns
    -------
    Average frame RMS in dBFS
    '''
    frameLen = 480
    nFrames = len(data)//frameLen
    if nFrames == 0:   # shorter than one frame: fall back to a single RMS
        return get_rms(data)
    totalRms,cnt = 0,0
    for a in range(nFrames):
        totalRms += get_rms(data[a*frameLen:(a+1)*frameLen])
        cnt += 1
    return totalRms/cnt
def get_rms(records):
'''
Parameters
----------
records
Returns
-------
'''
#return math.sqrt(sum([x * x for x in records])/len(records))
data = records.astype(np.float32).tolist()
if len(data) == 0:
return -99.9
rms = math.sqrt(sum([(x/32767) * (x/32767) for x in data])/len(data))
dBrmsValue = 20*math.log10(rms + 1.0E-6)
return dBrmsValue
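# --- Illustrative check (added; not part of the original file) ---
# get_rms reports dBFS relative to int16 full scale. For a full-scale sine
# the RMS is peak/sqrt(2), so the result should be close to
# 20*log10(1/sqrt(2)) = -3.01 dBFS:
if __name__ == '__main__':
    _t = np.arange(48000)
    _sine = (32767 * np.sin(2 * np.pi * 1000 * _t / 48000)).astype(np.int16)
    print(get_rms(_sine))   # expected: about -3.01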
def get_peak_rms(records):
'''
Parameters
----------
records
Returns
-------
'''
#return math.sqrt(sum([x * x for x in records])/len(records))
frameLen = 480
nFrames = len(records)//frameLen
maxRms = -99
for a in range(nFrames):
curRms = get_rms(records[a*frameLen:(a+1)*frameLen])
if curRms > maxRms:
maxRms = curRms
return maxRms
def get_one_channel_data(infile):
"""
:return:
"""
data, fs, chn = get_data_array(infile)
if fs == 48000 and chn == 1:
return data
if chn != 1:
data = np.array([data[n] for n in range(len(data)) if n % chn == 0]).astype(np.int16)
return resample(data,fs,48000)
def resample(data,fs,tarfs):
"""
:return:
"""
if fs == tarfs:
return data
    new_signal = librosa.resample(data.astype(np.float32), orig_sr=fs, target_sr=tarfs)
# augmenter = Resample(min_sample_rate=4000, max_sample_rate=48000, p=1.0)
# samples = augmenter(samples=data.astype(np.float32), sample_rate=tarfs)
return new_signal.astype(np.int16)
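# --- Example (added; not part of the original file) ---
# Resampling 1 s of 16 kHz audio to 48 kHz triples the length:
#     y = resample(np.zeros(16000, dtype=np.int16), 16000, 48000)
#     len(y)   # -> 48000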
def get_file_duration(filename):
    """
    :param filename:
    :return: (duration in seconds, sample rate)
    """
    f = wave.open(filename, "rb")
    # Read the format info (see get_data_array for the meaning of the fields)
    params = f.getparams()
    nchannels, sampwidth, framerate, nframes = params[:4]
    f.close()
    # NOTE: wave's nframes already counts frames rather than per-channel
    # samples, so the division by nchannels is only a no-op for the mono
    # files this library is used with.
    return nframes/framerate/nchannels,framerate
# ==== End of file: /AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/commFunction.py ====
import wave
import sys,os
from os import path
sys.path.append(path.dirname(__file__))
sys.path.append(os.path.dirname(path.dirname(__file__)))
from moviepy.editor import AudioFileClip
from commFunction import get_rms,get_ave_rms,get_one_channel_data,get_file_duration,get_data_array
from formatConvert import pcm2wav
import numpy as np
import scipy.signal as signal
import math
import librosa
from PCC.Pearson_CC import get_max_cc_by_dll
import ctypes,os,platform
from ctypes import *
def get_my_dll():
    """
    Load the platform-specific Pearson cross-correlation library.
    :return:
    """
    mydll = None
    cur_platform = platform.platform().split('-')[0]
    if cur_platform == 'Windows':
        mydll = ctypes.windll.LoadLibrary(sys.prefix + '/pcc.dll')
    if cur_platform == 'macOS':
        mydll = CDLL(sys.prefix + '/pcc.dylib')
    if cur_platform == 'Linux':
        mydll = CDLL(sys.prefix + '/pcc.so')
    return mydll
def get_wav_from_mp4(mp4file):
"""
Parameters
----------
mp4file
Returns
-------
"""
suffix = os.path.splitext(mp4file)[-1]
if suffix != '.mp4':
raise TypeError('wrong format! not mp4 file!' + str(suffix))
my_audio_clip = AudioFileClip(mp4file)
newFileName = mp4file[:-4] + '.wav'
my_audio_clip.write_audiofile(newFileName)
return newFileName
def isSlience(Filename =None,section=None,channels=1, bits=16, sample_rate=16000):
    """
    Parameters
    ----------
    Filename: input file; wav, pcm and mp4 are supported
    section: optional [start, end] interval in seconds; None checks the whole file
    Returns
    -------
    True if the (section of the) file is silent, otherwise False
    """
    suffix = os.path.splitext(Filename)[-1]
    if suffix == '.mp4':
        Filename = get_wav_from_mp4(Filename)
    if suffix == '.pcm':
        Filename = pcm2wav(Filename,channels,bits,sample_rate)
    if suffix == '.wav':
        pass
    length,fs = get_file_duration(Filename)
    data = get_one_channel_data(Filename)
    fs = 48000   # get_one_channel_data always returns 48 kHz mono data
    if section is None:
        startTime = 0
        endTime = length
    else:
        startTime = section[0]
        endTime = section[1]
    if startTime > length or startTime > endTime:
        raise TypeError('start point is beyond the file length: ' + str(suffix))
    if endTime > length:
        endTime = length
    ins = data[int(startTime*fs):int(endTime*fs)]
    dBrmsValue = get_rms(ins)
    print(dBrmsValue)
    if dBrmsValue > -70:
        return False
    else:
        # even if the overall level is low, reject if any 10 ms frame is audible
        for n in range(len(ins)//480):
            curdata = ins[480*n:480*(n+1)]
            dBrmsValue = get_rms(curdata)
            print(dBrmsValue)
            if dBrmsValue > -60:
                return False
        return True
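# --- Hedged usage sketch (added; not part of the original file) ---
# A silent file should pass the check: the overall level must stay below
# -70 dBFS and every 10 ms frame below -60 dBFS (after the internal 48 kHz
# resample). The snippet writes a one-second silent WAV and tests it.
if __name__ == '__main__':
    from commFunction import make_out_file
    make_out_file('silence_demo.wav', np.zeros(16000), 16000, 1)
    print(isSlience('silence_demo.wav'))   # expected: True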
def audioFormat(wavFileName=None):
"""
wavFileName:输入文件 wav,mp4
Returns
-------
refChannel:通道数
refsamWidth:比特位 2代表16bit
refsamplerate:采样率
refframeCount:样点数
"""
suffix = os.path.splitext(wavFileName)[-1]
if suffix != '.wav' and suffix != '.mp4':
raise TypeError('wrong format! not wav/mp4 file!' + str(suffix))
if suffix == '.mp4':
wavFileName = get_wav_from_mp4(wavFileName)
wavf = wave.open(wavFileName, 'rb')
refChannel,refsamWidth,refsamplerate,refframeCount = wavf.getnchannels(),wavf.getsampwidth(),wavf.getframerate(),wavf.getnframes()
return refChannel,refsamWidth*8,refsamplerate,refframeCount
def get_rms_level(wavFileName=None,rmsMode='total',section=None):
    """
    wavFileName: input wav file
    rmsMode: 'total' for the RMS of the whole section,
             'average' for the mean of the per-frame RMS values
    section: optional [start, end] interval in seconds; None measures the whole file
    Returns
    -------
    RMS level in dBFS, or None for an unknown rmsMode
    """
    suffix = os.path.splitext(wavFileName)[-1]
    if suffix != '.wav':
        raise TypeError('wrong format! not wav file!' + str(suffix))
    length,fs = get_file_duration(wavFileName)
    data = get_one_channel_data(wavFileName)
    fs = 48000   # get_one_channel_data always returns 48 kHz mono data
    if section == None:
        startTime = 0
        endTime = length
    else:
        startTime = section[0]
        endTime = section[1]
    if startTime > length or startTime > endTime:
        raise TypeError('start point is beyond the file length: ' + str(suffix))
    if endTime > length:
        endTime = length
    curdata = data[int(startTime*fs):int(endTime*fs)]
    if rmsMode == 'total':
        return get_rms(curdata)
    if rmsMode == 'average':
        return get_ave_rms(curdata)
    return None
def calculate_band_energy(audio_signal, sample_rate, num_bands,freq_mode='upper'):
    # Build num_bands equally spaced frequency bands across [fmin, fmax]
fmin,fmax = 100,sample_rate/2 - 100
freq_points = np.linspace(0, sample_rate/2, num_bands+1)
#freq_points = np.logspace(0, sample_rate / 2, num_bands + 1)
# Compute center frequencies of each band
center_freqs = freq_points[:-1] + np.diff(freq_points)/2
# Compute bandwidth of each band
bandwidths = np.diff(freq_points)
# Compute lower and upper frequency limits of each band
lower_freqs = center_freqs - bandwidths/2
upper_freqs = center_freqs + bandwidths/2
# Clip lower and upper frequency limits to specified range
lower_freqs = np.clip(lower_freqs, fmin, fmax)
upper_freqs = np.clip(upper_freqs, fmin, fmax)
    # Define a bandpass filter for each frequency band
band_filters = [signal.butter(4, [lower_freqs[i], upper_freqs[i]], 'bandpass', fs=sample_rate) for i in range(num_bands)]
    # Apply each bandpass filter to the audio signal
band_signals = [signal.filtfilt(band_filters[i][0], band_filters[i][1], audio_signal.astype(np.float32)) for i in range(num_bands)]
# Calculate energy for each frequency band
band_energy = [ get_rms(band_signals[i]) for i in range(num_bands)]
if freq_mode == 'upper':
return band_energy,upper_freqs
if freq_mode == 'lower':
return band_energy,lower_freqs
if freq_mode == 'centre':
return band_energy,center_freqs
def calculate_octave_band_energy(audio_signal,sample_rate):
    # Ten standard octave bands, each filtered at a matching reduced sample rate
num_bands = 10
center_freqs = [31,62,125,250,500,1000,2000,4000,8000,16000]
sample_rate_list = [100,200,400,1000,1600,3000,6000,12000,24000,48000]
    # Compute the band limits
lower_freqs = []
upper_freqs = []
for i in range(len(center_freqs)):
upper_freqs.append(center_freqs[i]*2**(0.5))
lower_freqs.append(center_freqs[i]/(2**(0.5)))
# Define bandpass filter for each frequency band
#b, a = signal.butter(4, [lower_freqs[0], upper_freqs[0]], 'bandpass', fs=8000)
band_filters = [signal.butter(4, [lower_freqs[i], upper_freqs[i]], 'bandpass', fs=sample_rate_list[i]) for i in range(num_bands)]
#print(band_filters)
    # Resample to each band's rate and apply its bandpass filter
band_signals = [signal.filtfilt(band_filters[i][0], band_filters[i][1], librosa.resample(audio_signal.astype(np.float32),orig_sr=sample_rate,target_sr=sample_rate_list[i]) ) for i in range(num_bands)]
# Calculate energy for each frequency band
band_energy = [ get_rms(band_signals[i]) for i in range(num_bands)]
return band_energy, center_freqs
def find_index(lst):
    """Scanning from the end, return the highest index whose value is more
    than 9 dB above the final element; -1 if there is none."""
    last_elem = lst[-1]
    for i, elem in reversed(list(enumerate(lst))):
        if elem - last_elem > 9:
            return i
    return -1
def get_effective_spectral(audiofile):
audio_data, sample_rate, ch = get_data_array(audiofile)
# Calculate energy for each frequency band
band_energy, upper_freqs = calculate_band_energy(audio_data, sample_rate, 40)
for i in range(len(band_energy)):
print(upper_freqs[i],band_energy[i])
final_index = find_index(band_energy)
# Print energy values for each band
return upper_freqs[final_index]
def find_max_energy_frequency(file_path):
    # Load the audio file
    data,rate,ch = get_data_array(file_path)
    # Remove the mean (DC component)
    data = data - np.mean(data)
    # Compute the FFT (zero-pad to an even length first)
    n = len(data)
    if n % 2 == 1:
        data = np.append(data, 0)
        n += 1
    X = np.fft.rfft(data)
    X = X[:n//2+1]
    # Energy in the frequency domain
    energy = np.abs(X)**2
    # Find the frequency with the maximum energy
    f = np.linspace(0, rate / 2, len(X))
    max_energy_index = np.argmax(energy)
    max_energy_frequency = f[max_energy_index]
    return max_energy_frequency
def cal_pitch(ref_path,file_path,pitchlogMode=1):
src_fs = find_max_energy_frequency(ref_path)
ds_fs = find_max_energy_frequency(file_path)
assert pitchlogMode == 0 or pitchlogMode == 1
result = 0
if pitchlogMode == 1:
result = math.log(ds_fs/src_fs, 2) * 12
if pitchlogMode == 0:
result = ds_fs/src_fs
return result
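# --- Worked example (added; not part of the original file) ---
# If the processed file's dominant frequency is exactly one octave above the
# reference (ds_fs = 2 * src_fs), mode 1 reports log2(2) * 12 = 12 semitones
# and mode 0 reports the plain ratio 2.0.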
def get_subarray(arr, lower, upper):
    """
    In the increasing list arr, find the elements within [lower, upper] and
    return the new array together with its start and end indices.
    Parameters:
        arr: one-dimensional increasing numpy array
        lower: lower bound of the range
        upper: upper bound of the range
    Returns:
        a tuple of three elements: the new array, the start index and the end index
    """
    # Convert the list to a one-dimensional numpy array
    arr = np.array(arr)
    # Use searchsorted to locate the start and end of the new array
    start_index = np.searchsorted(arr, lower, side='left')
    end_index = np.searchsorted(arr, upper, side='right')
    # Extract the new array with a slice
    new_arr = arr[start_index:end_index]
    # Return the new array and the start and end indices
    return (new_arr, start_index, end_index - 1)
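# Example (added): get_subarray([1, 2, 4, 8, 16], 2, 9)
# -> (array([2, 4, 8]), 1, 3)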
def find_max_min_diff_index(arr1, arr2, mode='max'):
"""
Find the index of max or min absolute difference between two arrays
:param arr1: first array
:param arr2: second array
:param mode: 'max' for maximum absolute difference, 'min' for minimum absolute difference
:return: the index of the point with max or min absolute difference
"""
arr1,arr2 = np.array(arr1),np.array(arr2)
mask = np.isfinite(arr1) & np.isfinite(arr2)
arr1 = arr1[mask]
arr2 = arr2[mask]
    # Find the point with the largest or smallest absolute difference
if mode == 'max':
diff = np.abs(arr1 - arr2)
index = np.argmax(diff)
elif mode == 'min':
diff = np.abs(arr1 - arr2)
index = np.argmin(diff)
else:
raise ValueError("Invalid mode, should be 'max' or 'min'")
    # Map the index back to the original (unmasked) array
    return (mask.nonzero()[0][index], diff[index])
def cal_EQ(ref_path,file_path):
"""
"""
# ref_data,fs,ch = get_data_array('01_POLQA.wav')
# ref_data = ref_data[int(fs*1.1):int(fs*2.7)]
# print(ref_data)
# ref_data_8k = librosa.resample(ref_data.astype(np.float32), orig_sr=fs ,target_sr=8000)
refspec = get_effective_spectral(ref_path)
testspec = get_effective_spectral(file_path)
#print(refspec,testspec)
print(refspec,testspec)
maxfreq = min(refspec,testspec)
minfreq = 100
center_freqs = [31, 62, 125, 250, 500, 1000, 2000, 4000, 8000, 16000]
pre_data, sample_rate, ch = get_data_array(ref_path)
# pre_data_8k = librosa.resample(pre_data.astype(np.float32), orig_sr=sample_rate, target_sr=8000)
# maxCoin, startPoint = get_max_cc_by_dll(ref_data_8k, pre_data_8k, get_my_dll(), 3)
# print(maxCoin,startPoint)
# if maxCoin < 0.3:
# return None
# pre_data = pre_data[int(startPoint):int(startPoint+sample_rate*1.5)]
pre_energy, pre_freq = calculate_octave_band_energy(pre_data, sample_rate)
#pre_energy, pre_freq = calculate_band_energy(pre_data, sample_rate,218)
post_data, sample_rate, ch = get_data_array(file_path)
# post_data_8k = librosa.resample(post_data.astype(np.float32), orig_sr=sample_rate, target_sr=8000)
# maxCoin, startPoint = get_max_cc_by_dll(ref_data_8k, post_data_8k, get_my_dll(), 3)
# print(maxCoin, startPoint)
# if maxCoin < 0.3:
# return None
# post_data = post_data[int(startPoint):int(startPoint+sample_rate*1.5)]
post_energy, post_freq = calculate_octave_band_energy(post_data, sample_rate)
#post_energy, post_freq = calculate_band_energy(post_data, sample_rate,218)
newfreq,startindex,stopindex = get_subarray(pre_freq,minfreq,maxfreq)
pre_energy = pre_energy[startindex:stopindex+1]
post_energy = post_energy[startindex:stopindex+1]
# for i in range(len(newfreq)):
# print(newfreq[i])
# print(pre_energy[i])
# print(post_energy[i])
index,maxdiff = find_max_min_diff_index(pre_energy,post_energy)
if maxdiff < 1:
return None
realdiff = 2*(post_energy[index]-pre_energy[index])
result = 15 if realdiff > 15 else (-15 if realdiff < -15 else realdiff)
return newfreq[index],result
if __name__ == '__main__':
# freq_points = np.logspace(20, 20000,num=10)
# print(freq_points)
#freq,db = cal_EQ('pre_process.wav','post_process.wav')
#print(freq,db)
# ref = '8_final_speech.wav'
# #print(isSlience(ref,section=[0,20]),)
# #print(audioFormat(ref))
#print(get_effective_spectral('pre_process_balnk.wav'))
    # NOTE: vad_nn is not defined in commFunction.py in this package version,
    # so this demo import appears stale and would raise ImportError if run.
    from commFunction import vad_nn
    print(vad_nn('01_POLQA.wav'))
# ==== End of file: /AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/FUNCTION/audioFunction.py ====
import copy
import sys
import wave
import sys,os
from os import path
sys.path.append(os.path.dirname(path.dirname(__file__)))
from ctypes import *
from commFunction import emxArray_real_T,get_data_of_ctypes_,write_ctypes_data_2_file_,get_none_data_of_ctypes_
import ctypes
# DLL_EXPORT void matchsig_2(const emxArray_real_T *ref, const emxArray_real_T *sig, double
# fs, double type, emxArray_real_T *sig_out, double *delay, double *err)
def match_sig(refFile=None,testFile=None,outFile=None,audioType=0):
"""
Parameters
----------
refFile
testFile
outFile
audioType 0:speech,1:music
Returns
-------
"""
refstruct, refsamplerate,reflen = get_data_of_ctypes_(refFile)
teststruct, testsamplerate,testlen = get_data_of_ctypes_(testFile)
outlen = max(reflen,testlen)
outStruct = get_none_data_of_ctypes_(outlen)
if refsamplerate != testsamplerate :
raise TypeError('Different format of ref and test files!')
    import platform
    mydll = None
    cur_platform = platform.platform().split('-')[0]
    if cur_platform == 'Windows':
        mydll = ctypes.windll.LoadLibrary(sys.prefix + '/matchsig.dll')
    if cur_platform == 'macOS':
        mydll = CDLL(sys.prefix + '/matchsig.dylib')
    if cur_platform == 'Linux':
        mydll = CDLL(sys.prefix + '/matchsig.so')
mydll.matchsig_2.argtypes = [POINTER(emxArray_real_T), POINTER(emxArray_real_T), POINTER(emxArray_real_T),c_double,c_double,
POINTER(c_double), POINTER(c_double)]
delay, err = c_double(0.0), c_double(0.0)
mydll.matchsig_2(byref(refstruct), byref(teststruct), byref(outStruct),c_double(refsamplerate),c_double(audioType),byref(delay), byref(err))
if err.value > 0.0:
print(err.value)
return None
else:
if outFile is not None:
write_ctypes_data_2_file_(outFile,outStruct,refsamplerate)
return delay.value/testsamplerate
if __name__ == '__main__':
test = 'mixDstFile_minus_13.wav'
ref = 'speech_cn.wav'
print(match_sig(refFile=ref, testFile=test,audioType=0))
pass
# ==== End of file: /AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/SNR_ESTIMATION/MATCH_SIG.py ====
import copy
import sys
import time
import sys,os
from os import path
sys.path.append(os.path.dirname(path.dirname(__file__)))
from ctypes import *
from commFunction import emxArray_real_T,get_data_of_ctypes_from_datablock,get_data_array,get_none_data_of_ctypes_
import ctypes
import numpy as np
# DLL_EXPORT void matchsig_2(const emxArray_real_T *ref, const emxArray_real_T *sig, double
# fs, double type, emxArray_real_T *sig_out, double *delay, double *err)
def match_sig(refData=None,testData=None,refsamplerate=48000):
"""
Parameters
----------
refFile
testFile
outFile
audioType 0:speech,1:music
Returns
-------
"""
assert len(refData) == len(testData)
nframes = len(refData)
print(time.time())
refstruct = get_data_of_ctypes_from_datablock(refData,nframes)
print(time.time())
teststruct = get_data_of_ctypes_from_datablock(testData,nframes)
print(time.time())
outStruct = get_none_data_of_ctypes_(nframes)
    import platform
    mydll = None
    cur_platform = platform.platform().split('-')[0]
    if cur_platform == 'Windows':
        mydll = ctypes.windll.LoadLibrary(sys.prefix + '/matchsig.dll')
    if cur_platform == 'macOS':
        mydll = CDLL(sys.prefix + '/matchsig.dylib')
    if cur_platform == 'Linux':   # mirror MATCH_SIG.py so Linux does not fall through with mydll = None
        mydll = CDLL(sys.prefix + '/matchsig.so')
mydll.matchsig_2.argtypes = [POINTER(emxArray_real_T), POINTER(emxArray_real_T), POINTER(emxArray_real_T),c_double,c_double,
POINTER(c_double), POINTER(c_double)]
delay, err = c_double(0.0), c_double(0.0)
print(time.time())
mydll.matchsig_2(byref(refstruct), byref(teststruct), byref(outStruct),c_double(refsamplerate),c_double(0),byref(delay), byref(err))
print(time.time())
if err.value > 0.0:
return None
else:
return delay.value
if __name__ == '__main__':
ref = r'C:\Users\vcloud_avl\Documents\我的POPO\src.wav'
test = r'C:\Users\vcloud_avl\Documents\我的POPO\test.wav'
refdata,fs,ch = get_data_array(ref)
testdata,fs,ch = get_data_array(test)
testdata = testdata[:96000]
refdata = refdata[:96000]
print(time.time())
print(match_sig(refData=refdata, testData=testdata))
print(time.time())
print(np.corrcoef(refdata,testdata))
print(time.time())
pass
# ==== End of file: /AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/SNR_ESTIMATION/MATC_SIG_BY_DATA.py ====
import librosa
import requests
import numpy as np
import datetime
SCORING_URL = 'https://dnsmos.azurewebsites.net/score-dec'
AUTH = ('netease',
'decmos')
SCENARIOS = [
'doubletalk_with_movement',
'doubletalk',
'farend_singletalk_with_movement',
'farend_singletalk',
'nearend_singletalk'
]
def read_and_process_audio_files(lpb_path, mic_path, enh_path,startPoint,SAMPLE_RATE):
lpb_sig, _ = librosa.load(lpb_path, sr=SAMPLE_RATE)
mic_sig, _ = librosa.load(mic_path, sr=SAMPLE_RATE)
enh_sig, _ = librosa.load(enh_path, sr=SAMPLE_RATE)
# Make the clips the same length
min_len = np.min([len(lpb_sig), len(mic_sig), len(enh_sig)])
lpb_sig = lpb_sig[:min_len]
mic_sig = mic_sig[:min_len]
enh_sig = enh_sig[:min_len]
lpb_sig, mic_sig, enh_sig = process_audio(
lpb_sig, mic_sig, enh_sig,startPoint,SAMPLE_RATE)
return lpb_sig, mic_sig, enh_sig
def process_audio(lpb_sig, mic_sig, enh_sig,startPoint,SAMPLE_RATE):
    silence_duration = int(startPoint * SAMPLE_RATE)  # startPoint is in seconds
lpb_sig = lpb_sig[silence_duration:]
mic_sig = mic_sig[silence_duration:]
enh_sig = enh_sig[silence_duration:]
return lpb_sig, mic_sig, enh_sig
def get_score(lpb_sig, mic_sig, enh_sig, scenario):
audio_data = {
'lpb': lpb_sig.tolist(),
'mic': mic_sig.tolist(),
'enh': enh_sig.tolist(),
'scenario': scenario
}
response = requests.post(SCORING_URL, json=audio_data, auth=AUTH)
json_body = response.json()
if 'error' in json_body:
raise Exception(json_body['error'])
return json_body
def cal_aec_mos(refFile=None,micFile=None,testFile=None,scenario=1,startPoint=0,SAMPLE_RATE=48000):
"""
Parameters
----------
refFile
micFile
testFile
scenario 0: doubletalk_with_movement 1:doubletalk 2:farend_singletalk_with_movement 3:farend_singletalk 4:nearend_singletalk
    startPoint: determine from which audio sample (startPoint (in seconds) * SAMPLE_RATE) to calculate
SAMPLE_RATE:
Returns
-------
"""
start = datetime.datetime.now()
lpb_sig, mic_sig, enh_sig = read_and_process_audio_files(
refFile, micFile, testFile,startPoint,SAMPLE_RATE)
sjson = get_score(lpb_sig,mic_sig,enh_sig,SCENARIOS[scenario])
end = datetime.datetime.now()
print('time duration:',end-start)
return sjson
if __name__ == '__main__':
path = r'D:\AudioPublicWork\3a_auto_test_porject\3a_auto_test_porject\08_TestDstFiles\sdk_zego_vivo_y3hf_music_V_shengbo_compare\aec\Speech\TestCase_01_None_None\near_cn'
ref = path + '\\' + 'far_cn_minus_30db.wav'
mic = path + '\\' + 'stdRefFile.wav'
test = path + '\\' + 'mixDstFile.wav'
cal_aec_mos(testFile=test,refFile=ref,micFile=mic,startPoint=0,SAMPLE_RATE=16000,scenario=0)
# ==== End of file: /AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/AEC_MOS/aecmos.py ====
import numpy as np
import librosa
import soundfile as sf
import math
def histogram_calculation(x=None, N=None, K=None):
'''
x: audio samples
N: number of audio sample
K: bins of histogram
'''
x_max = np.max(x)
x_min = np.min(x)
denorm = x_max - x_min
H = np.zeros(shape=[K, ], dtype=np.int32)
for n in range(N):
y = (x[n] - x_min) / denorm
k = int(K * y)
if k < K:
H[k] = H[k] + 1
else:
H[k - 1] = H[k - 1] + 1
return H
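# --- Illustrative note (added; not part of the original file) ---
# A hard-clipped signal piles samples onto its extreme values, so H gains
# tall plateaus in the first and last bins; clipping_coefficient_calculation
# below measures how wide those edge plateaus are relative to the K-1 span.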
def clipping_coefficient_calculation(H=None, K=None):
'''
'''
kl = 0
kr = K - 1
denorm = K - 1
yl0 = H[kl]
yr0 = H[kr]
dl, dr = 0, 0
Dmax = 0
# # modify0
# while kr > K//2:
# kr = kr - 1
# if H[kr] < yr0:
# dr = dr + 1
# else:
# break
# Dmax = np.maximum(Dmax, dr)
# while kl < K//2:
# kl = kl + 1
# if H[kl] < yl0:
# dl = dl + 1
# else:
# break
# Dmax = np.maximum(Dmax, dl)
# Rcl = 2*Dmax/denorm
# while kr > kl:
# kl = kl + 1
# kr = kr - 1
# if H[kl] <= yl0:
# dl = dl + 1
# else:
# yl0 = H[kl]
# dl = 0
# if H[kr] <= yr0:
# dr = dr + 1
# else:
# yr0 = H[kr]
# dr = 0
# Dmax = np.maximum(Dmax, np.maximum(dl, dr))
# Rcl = 2*Dmax/denorm
# return Rcl
# modify1
index_l = 0
index_r = K - 1
dl_list = [] # [ [index, d, H[index] ] ... ]
dr_list = []
while kr > kl:
kl = kl + 1
kr = kr - 1
if H[kl] <= yl0:
dl = dl + 1
else:
dl_list.append([index_l, dl, H[index_l]])
index_l = kl
yl0 = H[kl]
dl = 0
if H[kr] <= yr0:
dr = dr + 1
else:
dr_list.append([index_r, dr, H[index_r]])
index_r = kr
yr0 = H[kr]
dr = 0
Dmax = np.maximum(Dmax, np.maximum(dl, dr))
Rcl = 2 * Dmax / denorm
dl_list.append([index_l, dl, H[index_l]])
dr_list.append([index_r, dr, H[index_r]])
list_all = []
list_all.extend(dl_list)
list_all.extend(dr_list)
list_all.sort(key=lambda x: x[1], reverse=True)
Dmax_index = list_all[0][0]
Dmax_samples = list_all[0][2]
# if Rcl >= 0.9:
# print(list_all)
# print('Dmax:')
# print(Dmax)
# print('Dmax index:')
# print(list_all[0][0])
# print('Dmax H[index]:')
# print(list_all[0][2])
# print('Rcl:')
# print(Rcl)
# print()
return Rcl, Dmax_index, Dmax_samples
def get_rms(data=None):
rms = math.sqrt(sum([x * x for x in data]) / len(data))
dBrmsValue = 20 * math.log10(rms + 1.0E-6)
return dBrmsValue
def clip_detect(audio_data=None, sr=None):
'''
audio_data: audio samples float [-1,1)
sr: sample_rate
'''
t = 30 # detect period /ms
N = int(sr * t / 1000) # number of audio sample in a detect unit
K = 200 # number of histogram bins
    audio_piece = len(audio_data) // N  # number of whole 30 ms detect units
Rcl_list = []
clipping_flag_list = []
for i in range(audio_piece):
audio_slice = audio_data[i * N:(i + 1) * N]
        dBrms = get_rms(audio_slice)  # skip noise-floor segments
if dBrms < -50:
Rcl_list.append(0)
clipping_flag_list.append(0)
continue
        x_max = np.max(audio_slice)  # avoid a zero denominator in histogram_calculation
x_min = np.min(audio_slice)
if x_max == x_min:
Rcl_list.append(0)
clipping_flag_list.append(0)
continue
H = histogram_calculation(x=audio_slice, N=N, K=K)
Rcl, Dmax_index, Dmax_samples = clipping_coefficient_calculation(H=H, K=K) # Rcl: ratio of clipping
Rcl_list.append(Rcl)
# Rcl ratio towards clipping_flag (0/1)
if Rcl >= 0.9 and Dmax_samples > 4 * N / K: # clipping detection flag
Rcl = 1
else:
Rcl = 0
clipping_flag_list.append(Rcl)
clipping_ratio = np.zeros(shape=[len(audio_data), ], dtype=np.float32)
clipping_detect_flag = np.zeros(shape=[len(audio_data), ], dtype=np.float32)
for i in range(audio_piece):
clipping_ratio[i * N:(i + 1) * N] = Rcl_list[i] * 10000 / 32768
clipping_detect_flag[i * N:(i + 1) * N] = clipping_flag_list[i] * 10000 / 32768
return sum(clipping_flag_list)/len(clipping_flag_list)
def cal_clip_index(testFile=None):
"""
Parameters
----------
testFile
Returns
-------
"""
audio_data, sr = librosa.load(testFile, sr=None) # float [-1,1)
return clip_detect(audio_data=audio_data, sr=sr)
if __name__ == '__main__':
clipindex = cal_clip_index('mixDstFile.wav')
print(clipindex)
# ==== End of file: /AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/CLIPPING_DETECTION/audio_clip_detection.py ====
from datetime import date, datetime
from matplotlib.mlab import cohere,psd
from numpy import append,array,clip,log10,nonzero,ones,power,reshape
from numpy import searchsorted,shape,sqrt,sum,vstack,zeros
from numpy.ma import masked_array
from scipy.io import wavfile
from scipy.signal import butter,firwin,decimate,lfilter
from sys import stdout
from warnings import catch_warnings,simplefilter
__author__ = "Jonathan Polom <[email protected]>"
__date__ = date(2011, 4, 22)
__version__ = "0.5"
def thirdOctaves(minFreq, maxFreq):
"""
Calculates a list of frequencies spaced 1/3 octave apart in hertz
between minFreq and maxFreq
Input
-----
* minFreq : float or int
Must be non-zero and non-negative
* maxFreq : float or int
Must be non-zero and non-negative
Output
------
* freqs : ndarray
"""
if minFreq <= 0 or maxFreq <= 0:
raise ValueError("minFreq and maxFreq must be non-zero and non-negative")
else:
maxFreq = float(maxFreq)
f = float(minFreq)
freqs = array([f])
while f < maxFreq:
f = f * 10**0.1
freqs = append(freqs, f)
return freqs
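# Example (added): thirdOctaves(1.0, 2.0)
# -> array([1.0, 1.2589, 1.5849, 1.9953, 2.5119]); steps of 10**0.1 are one
# tenth of a decade, i.e. almost exactly 1/3 octave (2**(1/3) ~= 1.2599).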
def fftWindowSize(freqRes, hz):
"""
Calculate power of 2 window length for FFT to achieve specified frequency
resolution. Useful for power spectra and coherence calculations.
Input
-----
* freqRes : float
Desired frequency resolution in hertz
* hz : int
Sample rate, in hertz, of signal undergoing FFT
Output
------
* window : int
"""
freqRes = float(freqRes) # make sure frequency res is a float
pwr = 1 # initial power of 2 to try
res = hz / float(2**pwr) # calculate frequency resolution
while res > freqRes:
pwr += 1
res = hz / float(2**pwr)
return 2**pwr
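# Worked example (added): fftWindowSize(0.06, 2000) returns 65536, because
# 2000 / 2**15 ~= 0.0610 still exceeds 0.06 while 2000 / 2**16 ~= 0.0305
# does not.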
def downsampleBands(audio, hz, downsampleFactor):
"""
Downsample audio by integer factor
Input
-----
* audio : array-like
Array of original audio samples
* hz : float or int
Original audio sample rate in hertz
* downsampleFactor : int
Factor to downsample audio by, if desired
Output
------
* dsAudio : ndarray
Downsampled audio array
* hz : int
Downsampled audio sample rate in hertz
"""
# calculate downsampled audio rate in hertz
downsampleFactor = int(downsampleFactor) # factor must be integer
hz = int(hz / downsampleFactor)
for band in audio:
ds = decimate(band, downsampleFactor, ftype='fir')
try:
dsAudio = append(dsAudio, ds)
except:
dsAudio = ds
return dsAudio, hz
def octaveBandFilter(audio, hz,
octaveBands=[125, 250, 500, 1000, 2000, 4000, 8000],
butterOrd=6, hammingTime=16.6):
"""
Octave band filter raw audio. The audio is filtered through butterworth
filters of order 6 (by default), squared to obtain the envelope and finally
low-pass filtered using a 'hammingTime' length Hamming filter at 25 Hz.
Input
-----
* audio : array-like
Array of raw audio samples
* hz : float or int
Audio sample rate in hertz
* octaveBands : array-like
list or array of octave band center frequencies
* butterOrd : int
butterworth filter order
* hammingTime : float or int
Hamming window length, in milliseconds relative to audio sample rate
Output
------
* octaveBandAudio : ndarray
Octave band filtered audio
* hz : float or int
Filtered audio sample rate
"""
print("Butterworth filter order:",butterOrd)
print("Hamming filter length: ",hammingTime,"milliseconds")
print ("Audio sample rate: ",hz)
# calculate the nyquist frequency
nyquist = hz * 0.5
# length of Hamming window for FIR low-pass at 25 Hz
hammingLength = (hammingTime / 1000.0) * hz
print(hammingLength)
# process each octave band
for f in octaveBands:
bands = str(octaveBands[:octaveBands.index(f) + 1]).strip('[]')
statusStr = "Octave band filtering audio at: " + bands
unitStr = "Hz ".rjust(80 - len(statusStr))
stdout.write(statusStr)
stdout.write(unitStr)
stdout.write('\r')
stdout.flush()
# filter the output at the octave band f
f1 = f / sqrt(2)
f2 = f * sqrt(2)
# for some odd reason the band-pass butterworth doesn't work right
# when the filter order is high (above 3). likely a SciPy issue?
# also, butter likes to complain about possibly useless results when
# calculating filter coefficients for high order (above 4) low-pass
# filters with relatively low knee frequencies (relative to nyquist F).
# perhaps I just don't know how digital butterworth filters work and
# their limitations but I think this is odd.
# the issue described here will be sent to their mailing list
if f < max(octaveBands):
with catch_warnings(): # suppress the spurious warnings given
simplefilter('ignore') # under certain conditions
b1,a1 = butter(butterOrd, f1/nyquist, btype='high')
b2,a2 = butter(butterOrd, f2/nyquist, btype='low')
filtOut = lfilter(b1, a1, audio) # high-pass raw audio at f1
filtOut = lfilter(b2, a2, filtOut) # low-pass after high-pass at f1
else:
with catch_warnings():
simplefilter('ignore')
b1,a1 = butter(butterOrd, f/nyquist, btype='high')
filtOut = lfilter(b1, a1, audio)
filtOut = array(filtOut)**2
        b = firwin(int(hammingLength), 25.0, fs=hz)  # 25 Hz low-pass; 'fs' replaces the removed 'nyq' argument
filtOut = lfilter(b, 1, filtOut)
filtOut = filtOut * -1.0
# stack-up octave band filtered audio
try:
octaveBandAudio = vstack((octaveBandAudio, filtOut))
except:
octaveBandAudio = filtOut
print()
return octaveBandAudio
def octaveBandSpectra(filteredAudioBands, hz, fftRes=0.06):
"""
Calculate octave band power spectras
Input
-----
* filteredAudioBands : array-like
Octave band filtered audio
* hz : float or int
Audio sample rate in hertz. Must be the same for clean and dirty audio
* fftRes : float or int
Desired FFT frequency resolution
Output
------
* spectras : ndarray
Power spectra values
* fftfreqs : ndarray
Frequencies for FFT points
"""
# FFT window size for PSD calculation: 32768 for ~0.06 Hz res at 2 kHz
psdWindow = fftWindowSize(fftRes, hz)
print("Calculating octave band power spectras",)
print("(FFT length:",psdWindow,"samples)")
for band in filteredAudioBands:
spectra, freqs = psd(band, NFFT=psdWindow, Fs=hz)
spectra = reshape(spectra, len(freqs)) # change to row vector
spectra = spectra / max(spectra) # scale to [0,1]
# stack-up octave band spectras
try:
spectras = vstack((spectras, spectra))
fftfreqs = vstack((fftfreqs, freqs))
except:
spectras = spectra
fftfreqs = freqs
return spectras, fftfreqs
def octaveBandCoherence(degrAudioBands, refAudioBands,
hz, fftRes=0.122):
"""
Calculate coherence between clean and degraded octave band audio
Input
-----
* degrAudioBands : array-like
Degraded octave band audio
* refAudioBands : array-like
Reference (clean) octave band audio
* hz : float or int
Audio sample rate. Must be common between clean and dirty audio
* fftRes : float or int
Desired FFT frequency resolution
Output
------
* coherences : ndarray
Coherence values
* fftfreqs : ndarray
Frequencies for FFT points
"""
# FFT window size for PSD calculation: 32768 for ~0.06 Hz res at 2 kHz
# Beware that 'cohere' isn't as forgiving as 'psd' with FFT lengths
# larger than half the length of the signal
psdWindow = fftWindowSize(fftRes, hz)
print("Calculating degraded and reference audio coherence",)
print ("(FFT length:",psdWindow,"samples)")
for i,band in enumerate(degrAudioBands):
with catch_warnings(): # catch and ignore spurious warnings
simplefilter('ignore') # due to some irrelevant divide by 0's
coherence, freqs = cohere(band, refAudioBands[i],
NFFT=psdWindow, Fs=hz)
# stack-up octave band spectras
try:
coherences = vstack((coherences, coherence))
fftfreqs = vstack((fftfreqs, freqs))
except:
coherences = coherence
fftfreqs = freqs
return coherences, fftfreqs
def thirdOctaveRootSum(spectras, fftfreqs, minFreq=0.25, maxFreq=25.0):
"""
Calculates square root of sum of spectra over 1/3 octave bands
Input
-----
* spectras : array-like
Array or list of octave band spectras
* fftfreqs : array-like
Array or list of octave band FFT frequencies
* minFreq : float
Min frequency in 1/3 octave bands
* maxFreq : float
Max frequency in 1/3 octave bands
Output
------
* thirdOctaveRootSums : ndarray
Square root of spectra sums over 1/3 octave intervals
"""
print ("Calculating 1/3 octave square-rooted sums from",)
print (minFreq,"to",maxFreq,"Hz")
thirdOctaveBands = thirdOctaves(minFreq, maxFreq)
# loop over the spectras contained in 'spectras' and calculate 1/3 oct MTF
for i,spectra in enumerate(spectras):
freqs = fftfreqs[i] # get fft frequencies for spectra
# calculate the third octave sums
for f13 in thirdOctaveBands:
f131 = f13 / power(2, 1.0/6.0) # band start
f132 = f13 * power(2, 1.0/6.0) # band end
li = searchsorted(freqs, f131)
ui = searchsorted(freqs, f132) + 1
s = sum(spectra[li:ui]) # sum the spectral components in band
s = sqrt(s) # take square root of summed components
try:
sums = append(sums, s)
except:
sums = array([s])
# stack-up third octave modulation transfer functions
try:
thirdOctaveSums = vstack((thirdOctaveSums, sums))
except:
thirdOctaveSums = sums
# remove temp 'sum' and 'counts' variables for next octave band
del(sums)
return thirdOctaveSums
def thirdOctaveRMS(spectras, fftfreqs, minFreq=0.25, maxFreq=25.0):
"""
Calculates RMS value of spectra over 1/3 octave bands
Input
-----
* spectras : array-like
Array or list of octave band spectras
* fftfreqs : array-like
Array or list of octave band FFT frequencies
* minFreq : float
Min frequency in 1/3 octave bands
* maxFreq : float
Max frequency in 1/3 octave bands
Output
------
* thirdOctaveRMSValues : ndarray
RMS value of spectra over 1/3 octave intervals
"""
print ("Calculating 1/3 octave RMS values from",)
print (minFreq,"to",maxFreq,"Hz")
thirdOctaveBands = thirdOctaves(minFreq, maxFreq)
# loop over the spectras contained in 'spectras' and calculate 1/3 oct MTF
for i,spectra in enumerate(spectras):
freqs = fftfreqs[i] # get fft frequencies for spectra
# calculate the third octave sums
for f13 in thirdOctaveBands:
f131 = f13 / power(2, 1.0/6.0) # band start
f132 = f13 * power(2, 1.0/6.0) # band end
li = searchsorted(freqs, f131)
ui = searchsorted(freqs, f132) + 1
s = sum(spectra[li:ui]**2) # sum the spectral components in band
s = s / len(spectra[li:ui]) # divide by length of sum
s = sqrt(s) # square root
try:
sums = append(sums, s)
except:
sums = array([s])
# stack-up third octave modulation transfer functions
try:
thirdOctaveRMSValues = vstack((thirdOctaveRMSValues, sums))
except:
thirdOctaveRMSValues = sums
# remove temp 'sum' and 'counts' variables for next octave band
del(sums)
return thirdOctaveRMSValues
def sti(modulations, coherences, minCoherence=0.8):
"""
Calculate the speech transmission index from third octave modulation
indices. The indices are truncated after coherence between clean and dirty
audio falls below 'minCoherence' or 0.8, by default.
Input
-----
* modulations : array-like
Modulation indices spaced at 1/3 octaves within each octave band
* coherences : array-like
Coherence between clean and dirty octave band filtered audio
* minCoherence : float
The minimum coherence to include a mod index in the STI computation
Output
------
* index : float
The speech transmission index (STI)
"""
# create masking array of zeroes
snrMask = zeros(modulations.shape, dtype=int)
# sort through coherence array and mask corresponding SNRs where coherence
# values fall below 'minCoherence' (0.8 in most cases and by default)
for i,band in enumerate(coherences):
lessThanMin = nonzero(band < minCoherence)[0]
if len(lessThanMin) >= 1:
discardAfter = min(lessThanMin)
snrMask[i][discardAfter:] = ones((len(snrMask[i][discardAfter:])))
modulations = clip(modulations, 0, 0.99) # clip to [0, 0.99] (max: ~1)
snr = 10*log10(modulations/(1 - modulations)) # estimate SNR
snr = clip(snr, -15, 15) # clip to [-15,15]
snr = masked_array(snr, mask=snrMask) # exclude values from sum
snrCounts = (snr / snr).sum(axis=1) # count SNRs
snrCounts = snrCounts.data # remove masking
octaveBandSNR = snr.sum(axis=1) / snrCounts # calc average SNR
alpha = 7 * (snrCounts / snrCounts.sum()) # calc alpha weight
# octave band weighting factors, Steeneken and Houtgast (1985)
w = [0.129, 0.143, 0.114, 0.114, 0.186, 0.171, 0.143]
# calculate the STI measure
snrp = alpha * w * octaveBandSNR
snrp = snrp.sum()
index = (snrp + 15) / 30.0
print ("Speech Transmission Index (STI):",index)
return index
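# --- Worked check (added; not part of the original file) ---
# The final mapping is index = (snrp + 15) / 30 with SNRs clipped to
# [-15, 15] dB, so an apparent SNR of 0 dB in every band gives STI = 0.5;
# with equal per-band counts the extremes -15/+15 dB map to 0.0 and 1.0.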
def stiFromAudio(reference, degraded, hz, calcref=False, downsample=None,
name="unnamed"):
"""
Calculate the speech transmission index (STI) from clean and dirty
(ie: distorted) audio samples. The clean and dirty audio samples must have
a common sample rate for successful use of this function.
Input
-----
* reference : array-like
Clean reference audio sample as an array of floating-point values
* degraded : array-like
Degraded audio sample as an array, or array of arrays for multiple
samples, of floating-point values
* hz : int
Audio sample rate in hertz
* calcref : boolean
Calculate STI for reference signal alone
* downsample : int or None
Downsampling integer factor
* name : string
Name of sample set, for output tracking in larger runs
Output
------
* sti : array-like or float
The calculated speech transmission index (STI) value(s)
"""
# put single sample degraded array into another array so the loop works
if type(degraded) is not type([]):
degraded = [degraded]
print ("-"*80)
print ("Speech Transmission Index (STI) from speech waveforms".center(80))
print ("-"*80)
print ()
print ("Sample set: ",name)
print ("Number of samples: ",len(degraded))
print ("Date/time: ",datetime.now().isoformat())
print ("Calculate reference STI:",)
if calcref:
print ("yes")
else:
print ("no")
print (" Reference Speech ".center(80,'*'))
refOctaveBands = octaveBandFilter(reference, hz)
refRate = hz
# downsampling, if desired
if type(downsample) is type(1):
refOctaveBands, refRate = downsampleBands(refOctaveBands, refRate,
downsample)
# calculate STI for reference sample, if boolean set
if calcref:
# STI calc procedure
spectras, sfreqs = octaveBandSpectra(refOctaveBands, refRate)
coherences, cfreqs = octaveBandCoherence(refOctaveBands, refOctaveBands,
refRate)
thirdOctaveMTF = thirdOctaveRootSum(spectras, sfreqs)
thirdOctaveCoherences = thirdOctaveRMS(coherences, cfreqs)
# add to interim array for MTFs and coherences
try:
thirdOctaveTemps.append([thirdOctaveMTF, thirdOctaveCoherences])
except:
thirdOctaveTemps = [[thirdOctaveMTF, thirdOctaveCoherences]]
print()
# loop over degraded audio samples and calculate STIs
for j,sample in enumerate(degraded):
print (" Degraded Speech: Sample {0} ".format(j + 1).center(80,'*'))
degrOctaveBands = octaveBandFilter(sample, hz)
degrRate = hz
# downsampling, if desired
if type(downsample) is type(1):
degrOctaveBands, degrRate = downsampleBands(degrOctaveBands,
degrRate, downsample)
# STI calc procedure
spectras, sfreqs = octaveBandSpectra(degrOctaveBands, degrRate)
coherences, cfreqs = octaveBandCoherence(refOctaveBands,
degrOctaveBands, refRate)
thirdOctaveMTF = thirdOctaveRootSum(spectras, sfreqs)
thirdOctaveCoherences = thirdOctaveRMS(coherences, cfreqs)
# add to interim array for MTFs and coherences
try:
thirdOctaveTemps.append([thirdOctaveMTF, thirdOctaveCoherences])
except:
thirdOctaveTemps = [[thirdOctaveMTF, thirdOctaveCoherences]]
print()
# calculate the STI values
print (" Speech Transmission Index ".center(80,'*'))
for i in range(0,len(thirdOctaveTemps)):
sampleSTI = sti(thirdOctaveTemps[i][0], thirdOctaveTemps[i][1])
# add to STI output array
try:
stiValues.append(sampleSTI)
except:
stiValues = [sampleSTI]
# unpack single value
if len(stiValues) == 1:
stiValues = stiValues[0]
print()
return stiValues
def readwav(path):
"""
    Reads a Microsoft WAV format audio file and scales the integer sample
    values to [-1, 1]. Returns a tuple consisting of the scaled WAV samples
    and the sample rate in hertz.
Input
-----
* path : string
Valid system path to file
Output
------
* audio : array-like
Array of scaled sampled
* rate : int
Audio sample rate in hertz
"""
wav = wavfile.read(path)
rate = wav[0]
audio = array(wav[1])
scale = float(max(audio))
audio = audio / scale
return audio, rate
# ==== End of file: /AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/STI/sti.py ====
dependencies = ['torch', 'torchaudio']
import sys
sys.path.append('../')
import json
import os
import torch
import torchaudio
from typing import Callable, List
import warnings
languages = ['ru', 'en', 'de', 'es']
class OnnxWrapper():
def __init__(self, path, force_onnx_cpu=False):
import numpy as np
global np
import onnxruntime
opts = onnxruntime.SessionOptions()
opts.inter_op_num_threads = 1
opts.intra_op_num_threads = 1
if force_onnx_cpu and 'CPUExecutionProvider' in onnxruntime.get_available_providers():
self.session = onnxruntime.InferenceSession(path, providers=['CPUExecutionProvider'], sess_options=opts)
else:
self.session = onnxruntime.InferenceSession(path, sess_options=opts)
self.reset_states()
self.sample_rates = [8000, 16000]
def _validate_input(self, x, sr: int):
if x.dim() == 1:
x = x.unsqueeze(0)
if x.dim() > 2:
raise ValueError(f"Too many dimensions for input audio chunk {x.dim()}")
if sr != 16000 and (sr % 16000 == 0):
step = sr // 16000
x = x[:,::step]
sr = 16000
if sr not in self.sample_rates:
raise ValueError(f"Supported sampling rates: {self.sample_rates} (or multiply of 16000)")
if sr / x.shape[1] > 31.25:
raise ValueError("Input audio chunk is too short")
return x, sr
def reset_states(self, batch_size=1):
self._h = np.zeros((2, batch_size, 64)).astype('float32')
self._c = np.zeros((2, batch_size, 64)).astype('float32')
self._last_sr = 0
self._last_batch_size = 0
def __call__(self, x, sr: int):
x, sr = self._validate_input(x, sr)
batch_size = x.shape[0]
if not self._last_batch_size:
self.reset_states(batch_size)
if (self._last_sr) and (self._last_sr != sr):
self.reset_states(batch_size)
if (self._last_batch_size) and (self._last_batch_size != batch_size):
self.reset_states(batch_size)
if sr in [8000, 16000]:
ort_inputs = {'input': x.numpy(), 'h': self._h, 'c': self._c, 'sr': np.array(sr, dtype='int64')}
ort_outs = self.session.run(None, ort_inputs)
out, self._h, self._c = ort_outs
else:
raise ValueError()
self._last_sr = sr
self._last_batch_size = batch_size
out = torch.tensor(out)
return out
def audio_forward(self, x, sr: int, num_samples: int = 512):
outs = []
x, sr = self._validate_input(x, sr)
if x.shape[1] % num_samples:
pad_num = num_samples - (x.shape[1] % num_samples)
x = torch.nn.functional.pad(x, (0, pad_num), 'constant', value=0.0)
self.reset_states(x.shape[0])
for i in range(0, x.shape[1], num_samples):
wavs_batch = x[:, i:i+num_samples]
out_chunk = self.__call__(wavs_batch, sr)
outs.append(out_chunk)
stacked = torch.cat(outs, dim=1)
return stacked.cpu()
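# --- Hedged usage sketch (added; not part of the original file) ---
# 'speech.wav' and 'silero_vad.onnx' below are hypothetical paths:
#
#     wav = read_audio('speech.wav', sampling_rate=16000)
#     model = OnnxWrapper('silero_vad.onnx')
#     probs = model.audio_forward(wav, 16000)   # (1, n_chunks) speech probabilities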
class Validator():
def __init__(self, url, force_onnx_cpu):
self.onnx = True if url.endswith('.onnx') else False
torch.hub.download_url_to_file(url, 'inf.model')
if self.onnx:
import onnxruntime
if force_onnx_cpu and 'CPUExecutionProvider' in onnxruntime.get_available_providers():
self.model = onnxruntime.InferenceSession('inf.model', providers=['CPUExecutionProvider'])
else:
self.model = onnxruntime.InferenceSession('inf.model')
else:
self.model = init_jit_model(model_path='inf.model')
def __call__(self, inputs: torch.Tensor):
with torch.no_grad():
if self.onnx:
ort_inputs = {'input': inputs.cpu().numpy()}
outs = self.model.run(None, ort_inputs)
outs = [torch.Tensor(x) for x in outs]
else:
outs = self.model(inputs)
return outs
def read_audio(path: str,
sampling_rate: int = 16000):
wav, sr = torchaudio.load(path)
if wav.size(0) > 1:
wav = wav.mean(dim=0, keepdim=True)
if sr != sampling_rate:
transform = torchaudio.transforms.Resample(orig_freq=sr,
new_freq=sampling_rate)
wav = transform(wav)
sr = sampling_rate
assert sr == sampling_rate
return wav.squeeze(0)
def save_audio(path: str,
tensor: torch.Tensor,
sampling_rate: int = 16000):
torchaudio.save(path, tensor.unsqueeze(0), sampling_rate, bits_per_sample=16)
def init_jit_model(model_path: str,
device=torch.device('cpu')):
torch.set_grad_enabled(False)
model = torch.jit.load(model_path, map_location=device)
model.eval()
return model
def make_visualization(probs, step):
import pandas as pd
pd.DataFrame({'probs': probs},
index=[x * step for x in range(len(probs))]).plot(figsize=(16, 8),
kind='area', ylim=[0, 1.05], xlim=[0, len(probs) * step],
xlabel='seconds',
ylabel='speech probability',
colormap='tab20')
def get_speech_timestamps(audio: torch.Tensor,
model,
threshold: float = 0.5,
sampling_rate: int = 16000,
min_speech_duration_ms: int = 250,
max_speech_duration_s: float = float('inf'),
min_silence_duration_ms: int = 100,
window_size_samples: int = 512,
speech_pad_ms: int = 30,
return_seconds: bool = False,
visualize_probs: bool = False,
progress_tracking_callback: Callable[[float], None] = None):
"""
This method is used for splitting long audios into speech chunks using silero VAD
Parameters
----------
audio: torch.Tensor, one dimensional
        One dimensional float torch.Tensor; other types are cast to torch.Tensor if possible
model: preloaded .jit silero VAD model
threshold: float (default - 0.5)
Speech threshold. Silero VAD outputs speech probabilities for each audio chunk, probabilities ABOVE this value are considered as SPEECH.
It is better to tune this parameter for each dataset separately, but "lazy" 0.5 is pretty good for most datasets.
sampling_rate: int (default - 16000)
Currently silero VAD models support 8000 and 16000 sample rates
    min_speech_duration_ms: int (default - 250 milliseconds)
        Final speech chunks shorter than min_speech_duration_ms are thrown out
    max_speech_duration_s: float (default - inf)
        Maximum duration of speech chunks in seconds
        Chunks longer than max_speech_duration_s will be split at the timestamp of the last silence that lasts more than 100 ms (if any), to prevent aggressive cutting.
        Otherwise, they will be split aggressively just before max_speech_duration_s.
    min_silence_duration_ms: int (default - 100 milliseconds)
        At the end of each speech chunk, wait for min_silence_duration_ms before separating it
    window_size_samples: int (default - 512 samples)
        Audio chunks of window_size_samples size are fed to the silero VAD model.
        WARNING! Silero VAD models were trained using 512, 1024, 1536 samples for the 16000 sample rate and 256, 512, 768 samples for the 8000 sample rate.
        Values other than these may affect model performance!!
speech_pad_ms: int (default - 30 milliseconds)
Final speech chunks are padded by speech_pad_ms each side
    return_seconds: bool (default - False)
        whether to return timestamps in seconds (default - samples)
    visualize_probs: bool (default - False)
        whether to draw the probability histogram or not
progress_tracking_callback: Callable[[float], None] (default - None)
callback function taking progress in percents as an argument
Returns
----------
speeches: list of dicts
list containing ends and beginnings of speech chunks (samples or seconds based on return_seconds)
"""
    if not torch.is_tensor(audio):
        try:
            audio = torch.Tensor(audio)
        except Exception:
            raise TypeError("Audio cannot be cast to tensor. Cast it manually")
if len(audio.shape) > 1:
for i in range(len(audio.shape)): # trying to squeeze empty dimensions
audio = audio.squeeze(0)
if len(audio.shape) > 1:
raise ValueError("More than one dimension in audio. Are you trying to process audio with 2 channels?")
if sampling_rate > 16000 and (sampling_rate % 16000 == 0):
step = sampling_rate // 16000
sampling_rate = 16000
audio = audio[::step]
        warnings.warn('Sampling rate is a multiple of 16000, casting to 16000 manually!')
else:
step = 1
if sampling_rate == 8000 and window_size_samples > 768:
warnings.warn('window_size_samples is too big for 8000 sampling_rate! Better set window_size_samples to 256, 512 or 768 for 8000 sample rate!')
if window_size_samples not in [256, 512, 768, 1024, 1536]:
warnings.warn('Unusual window_size_samples! Supported window_size_samples:\n - [512, 1024, 1536] for 16000 sampling_rate\n - [256, 512, 768] for 8000 sampling_rate')
model.reset_states()
min_speech_samples = sampling_rate * min_speech_duration_ms / 1000
speech_pad_samples = sampling_rate * speech_pad_ms / 1000
max_speech_samples = sampling_rate * max_speech_duration_s - window_size_samples - 2 * speech_pad_samples
min_silence_samples = sampling_rate * min_silence_duration_ms / 1000
    min_silence_samples_at_max_speech = sampling_rate * 98 / 1000  # 98 ms of silence permits a split when max speech duration is reached
audio_length_samples = len(audio)
speech_probs = []
for current_start_sample in range(0, audio_length_samples, window_size_samples):
chunk = audio[current_start_sample: current_start_sample + window_size_samples]
if len(chunk) < window_size_samples:
chunk = torch.nn.functional.pad(chunk, (0, int(window_size_samples - len(chunk))))
speech_prob = model(chunk, sampling_rate).item()
speech_probs.append(speech_prob)
        # calculate progress and send it to the callback function
progress = current_start_sample + window_size_samples
if progress > audio_length_samples:
progress = audio_length_samples
progress_percent = (progress / audio_length_samples) * 100
if progress_tracking_callback:
progress_tracking_callback(progress_percent)
triggered = False
speeches = []
current_speech = {}
neg_threshold = threshold - 0.15
temp_end = 0 # to save potential segment end (and tolerate some silence)
prev_end = next_start = 0 # to save potential segment limits in case of maximum segment size reached
for i, speech_prob in enumerate(speech_probs):
if (speech_prob >= threshold) and temp_end:
temp_end = 0
if next_start < prev_end:
next_start = window_size_samples * i
if (speech_prob >= threshold) and not triggered:
triggered = True
current_speech['start'] = window_size_samples * i
continue
if triggered and (window_size_samples * i) - current_speech['start'] > max_speech_samples:
if prev_end:
current_speech['end'] = prev_end
speeches.append(current_speech)
current_speech = {}
if next_start < prev_end: # previously reached silence (< neg_thres) and is still not speech (< thres)
triggered = False
else:
current_speech['start'] = next_start
prev_end = next_start = temp_end = 0
else:
current_speech['end'] = window_size_samples * i
speeches.append(current_speech)
current_speech = {}
prev_end = next_start = temp_end = 0
triggered = False
continue
if (speech_prob < neg_threshold) and triggered:
if not temp_end:
temp_end = window_size_samples * i
            if ((window_size_samples * i) - temp_end) > min_silence_samples_at_max_speech:  # condition to avoid cutting in very short silence
prev_end = temp_end
if (window_size_samples * i) - temp_end < min_silence_samples:
continue
else:
current_speech['end'] = temp_end
if (current_speech['end'] - current_speech['start']) > min_speech_samples:
speeches.append(current_speech)
current_speech = {}
prev_end = next_start = temp_end = 0
triggered = False
continue
if current_speech and (audio_length_samples - current_speech['start']) > min_speech_samples:
current_speech['end'] = audio_length_samples
speeches.append(current_speech)
for i, speech in enumerate(speeches):
if i == 0:
speech['start'] = int(max(0, speech['start'] - speech_pad_samples))
if i != len(speeches) - 1:
silence_duration = speeches[i+1]['start'] - speech['end']
if silence_duration < 2 * speech_pad_samples:
speech['end'] += int(silence_duration // 2)
speeches[i+1]['start'] = int(max(0, speeches[i+1]['start'] - silence_duration // 2))
else:
speech['end'] = int(min(audio_length_samples, speech['end'] + speech_pad_samples))
speeches[i+1]['start'] = int(max(0, speeches[i+1]['start'] - speech_pad_samples))
else:
speech['end'] = int(min(audio_length_samples, speech['end'] + speech_pad_samples))
if return_seconds:
for speech_dict in speeches:
speech_dict['start'] = round(speech_dict['start'] / sampling_rate, 1)
speech_dict['end'] = round(speech_dict['end'] / sampling_rate, 1)
elif step > 1:
for speech_dict in speeches:
speech_dict['start'] *= step
speech_dict['end'] *= step
if visualize_probs:
make_visualization(speech_probs, window_size_samples / sampling_rate)
return speeches
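# A minimal usage sketch for the VAD pipeline above (illustrative only; it
# assumes a preloaded silero VAD model `vad_model` and a 16 kHz mono file
# 'example.wav', neither of which is defined in this module):
#
#     wav = read_audio('example.wav', sampling_rate=16000)
#     speech_timestamps = get_speech_timestamps(wav, vad_model,
#                                               sampling_rate=16000,
#                                               return_seconds=False)
#     speech_only = collect_chunks(speech_timestamps, wav)  # defined below
#     save_audio('speech_only.wav', speech_only, sampling_rate=16000)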
def get_number_ts(wav: torch.Tensor,
model,
model_stride=8,
hop_length=160,
sample_rate=16000):
wav = torch.unsqueeze(wav, dim=0)
perframe_logits = model(wav)[0]
perframe_preds = torch.argmax(torch.softmax(perframe_logits, dim=1), dim=1).squeeze() # (1, num_frames_strided)
extended_preds = []
for i in perframe_preds:
extended_preds.extend([i.item()] * model_stride)
# len(extended_preds) is *num_frames_real*; for each frame of audio we know if it has a number in it.
triggered = False
timings = []
cur_timing = {}
for i, pred in enumerate(extended_preds):
if pred == 1:
if not triggered:
cur_timing['start'] = int((i * hop_length) / (sample_rate / 1000))
triggered = True
elif pred == 0:
if triggered:
cur_timing['end'] = int((i * hop_length) / (sample_rate / 1000))
timings.append(cur_timing)
cur_timing = {}
triggered = False
    if cur_timing:
        cur_timing['end'] = int(wav.shape[-1] / (sample_rate / 1000))  # len(wav) is 1 after unsqueeze; use the sample dimension
        timings.append(cur_timing)
return timings
def get_language(wav: torch.Tensor,
model):
wav = torch.unsqueeze(wav, dim=0)
lang_logits = model(wav)[2]
lang_pred = torch.argmax(torch.softmax(lang_logits, dim=1), dim=1).item() # from 0 to len(languages) - 1
assert lang_pred < len(languages)
return languages[lang_pred]
def get_language_and_group(wav: torch.Tensor,
model,
lang_dict: dict,
lang_group_dict: dict,
top_n=1):
wav = torch.unsqueeze(wav, dim=0)
lang_logits, lang_group_logits = model(wav)
softm = torch.softmax(lang_logits, dim=1).squeeze()
softm_group = torch.softmax(lang_group_logits, dim=1).squeeze()
srtd = torch.argsort(softm, descending=True)
srtd_group = torch.argsort(softm_group, descending=True)
outs = []
outs_group = []
for i in range(top_n):
prob = round(softm[srtd[i]].item(), 2)
prob_group = round(softm_group[srtd_group[i]].item(), 2)
outs.append((lang_dict[str(srtd[i].item())], prob))
outs_group.append((lang_group_dict[str(srtd_group[i].item())], prob_group))
return outs, outs_group
class VADIterator:
def __init__(self,
model,
threshold: float = 0.5,
sampling_rate: int = 16000,
min_silence_duration_ms: int = 100,
speech_pad_ms: int = 30
):
"""
Class for stream imitation
Parameters
----------
model: preloaded .jit silero VAD model
        threshold: float (default - 0.5)
            Speech threshold. Silero VAD outputs speech probabilities for each audio chunk; probabilities ABOVE this value are considered SPEECH.
            It is better to tune this parameter for each dataset separately, but "lazy" 0.5 is pretty good for most datasets.
        sampling_rate: int (default - 16000)
            Currently silero VAD models support 8000 and 16000 sample rates
        min_silence_duration_ms: int (default - 100 milliseconds)
            At the end of each speech chunk, wait for min_silence_duration_ms before separating it
        speech_pad_ms: int (default - 30 milliseconds)
            Final speech chunks are padded by speech_pad_ms on each side
"""
self.model = model
self.threshold = threshold
self.sampling_rate = sampling_rate
if sampling_rate not in [8000, 16000]:
raise ValueError('VADIterator does not support sampling rates other than [8000, 16000]')
self.min_silence_samples = sampling_rate * min_silence_duration_ms / 1000
self.speech_pad_samples = sampling_rate * speech_pad_ms / 1000
self.reset_states()
def reset_states(self):
self.model.reset_states()
self.triggered = False
self.temp_end = 0
self.current_sample = 0
def __call__(self, x, return_seconds=False):
"""
x: torch.Tensor
audio chunk (see examples in repo)
return_seconds: bool (default - False)
whether return timestamps in seconds (default - samples)
"""
        if not torch.is_tensor(x):
            try:
                x = torch.Tensor(x)
            except Exception:
                raise TypeError("Audio cannot be cast to tensor. Cast it manually")
window_size_samples = len(x[0]) if x.dim() == 2 else len(x)
self.current_sample += window_size_samples
speech_prob = self.model(x, self.sampling_rate).item()
if (speech_prob >= self.threshold) and self.temp_end:
self.temp_end = 0
if (speech_prob >= self.threshold) and not self.triggered:
self.triggered = True
speech_start = self.current_sample - self.speech_pad_samples
return {'start': int(speech_start) if not return_seconds else round(speech_start / self.sampling_rate, 1)}
if (speech_prob < self.threshold - 0.15) and self.triggered:
if not self.temp_end:
self.temp_end = self.current_sample
if self.current_sample - self.temp_end < self.min_silence_samples:
return None
else:
speech_end = self.temp_end + self.speech_pad_samples
self.temp_end = 0
self.triggered = False
return {'end': int(speech_end) if not return_seconds else round(speech_end / self.sampling_rate, 1)}
return None
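# A minimal streaming sketch for VADIterator (illustrative only; assumes a
# preloaded silero VAD model `vad_model` and a 16 kHz file 'example.wav'):
#
#     vad_iterator = VADIterator(vad_model, sampling_rate=16000)
#     wav = read_audio('example.wav', sampling_rate=16000)
#     window_size_samples = 512  # use 256/512/768 for 8000 Hz audio
#     for i in range(0, len(wav), window_size_samples):
#         chunk = wav[i: i + window_size_samples]
#         if len(chunk) < window_size_samples:
#             break
#         speech_dict = vad_iterator(chunk, return_seconds=True)
#         if speech_dict:
#             print(speech_dict)  # either {'start': ...} or {'end': ...}
#     vad_iterator.reset_states()  # reset model states between audio files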
def collect_chunks(tss: List[dict],
wav: torch.Tensor):
chunks = []
for i in tss:
chunks.append(wav[i['start']: i['end']])
return torch.cat(chunks)
def drop_chunks(tss: List[dict],
wav: torch.Tensor):
chunks = []
cur_start = 0
for i in tss:
chunks.append((wav[cur_start: i['start']]))
cur_start = i['end']
return torch.cat(chunks)
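# Example (illustrative): with timestamps in samples from get_speech_timestamps,
# collect_chunks keeps only the speech, while drop_chunks keeps the audio
# between chunks (note that audio after the last chunk's end is discarded):
#
#     tss = [{'start': 16000, 'end': 32000}]
#     speech_only = collect_chunks(tss, wav)  # one second of speech
#     non_speech = drop_chunks(tss, wav)      # audio before the chunk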
def versiontuple(v):
    # strip any local version suffix (e.g. '+cu113') before splitting
    splitted = v.split('+')[0].split(".")
    version_list = []
    for i in splitted:
        try:
            version_list.append(int(i))
        except ValueError:
            version_list.append(0)
    return tuple(version_list)
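# Example: versiontuple('1.12.1+cu113') -> (1, 12, 1), and
# versiontuple('2.0.0.dev') -> (2, 0, 0, 0) since non-numeric parts become 0.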
def silero_vad(speech, onnx=False, force_onnx_cpu=False):
    """Silero Voice Activity Detector
    Runs the silero VAD model on an audio file and returns the detected speech sections in seconds
    Please see https://github.com/snakers4/silero-vad for usage examples
    """
    SAMPLING_RATE = 16000
    if not onnx:
        installed_version = torch.__version__
        supported_version = '1.12.0'
        if versiontuple(installed_version) < versiontuple(supported_version):
            raise Exception(f'Please install torch {supported_version} or greater ({installed_version} installed)')
    model_dir = sys.prefix
    if onnx:
        model = OnnxWrapper(os.path.join(model_dir, 'silero_vad.onnx'), force_onnx_cpu)
    else:
        model = init_jit_model(os.path.join(model_dir, 'silero_vad.jit'))
    wav = read_audio(speech, sampling_rate=SAMPLING_RATE)
    # get speech timestamps from the full audio file
    speech_timestamps = get_speech_timestamps(wav, model, sampling_rate=SAMPLING_RATE)
    speech_section = [[a['start'] / SAMPLING_RATE, a['end'] / SAMPLING_RATE] for a in speech_timestamps]
    return speech_section
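# A minimal usage sketch (illustrative only; assumes 'silero_vad.jit' exists
# under sys.prefix and that 'pre_process.wav' is a readable audio file):
#
#     sections = silero_vad('pre_process.wav')
#     # e.g. [[0.5, 2.3], [3.1, 4.0]] -- [start, end] pairs in seconds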
def silero_number_detector(onnx=False, force_onnx_cpu=False):
"""Silero Number Detector
Returns a model with a set of utils
Please see https://github.com/snakers4/silero-vad for usage examples
"""
    raise NotImplementedError('This model has been deprecated and is not supported anymore.')
    # NOTE: the code below is unreachable and kept for reference only
    if onnx:
url = 'https://models.silero.ai/vad_models/number_detector.onnx'
else:
url = 'https://models.silero.ai/vad_models/number_detector.jit'
model = Validator(url, force_onnx_cpu)
utils = (get_number_ts,
save_audio,
read_audio,
collect_chunks,
drop_chunks)
return model, utils
def silero_lang_detector(onnx=False, force_onnx_cpu=False):
"""Silero Language Classifier
Returns a model with a set of utils
Please see https://github.com/snakers4/silero-vad for usage examples
"""
    raise NotImplementedError('This model has been deprecated and is not supported anymore.')
    # NOTE: the code below is unreachable and kept for reference only
    if onnx:
url = 'https://models.silero.ai/vad_models/number_detector.onnx'
else:
url = 'https://models.silero.ai/vad_models/number_detector.jit'
model = Validator(url, force_onnx_cpu)
utils = (get_language,
read_audio)
return model, utils
def silero_lang_detector_95(onnx=False, force_onnx_cpu=False):
"""Silero Language Classifier (95 languages)
Returns a model with a set of utils
Please see https://github.com/snakers4/silero-vad for usage examples
"""
    raise NotImplementedError('This model has been deprecated and is not supported anymore.')
    # NOTE: the code below is unreachable and kept for reference only
    if onnx:
url = 'https://models.silero.ai/vad_models/lang_classifier_95.onnx'
else:
url = 'https://models.silero.ai/vad_models/lang_classifier_95.jit'
model = Validator(url, force_onnx_cpu)
model_dir = os.path.join(os.path.dirname(__file__), 'files')
with open(os.path.join(model_dir, 'lang_dict_95.json'), 'r') as f:
lang_dict = json.load(f)
with open(os.path.join(model_dir, 'lang_group_dict_95.json'), 'r') as f:
lang_group_dict = json.load(f)
utils = (get_language_and_group, read_audio)
return model, lang_dict, lang_group_dict, utils
if __name__ == '__main__':
print(silero_vad('pre_process.wav'))
|
AlgorithmLib
|
/AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/VAD_NN/hubconf.py
|
hubconf.py
|
from socket import *
import threading
import json
import os
address = '10.219.36.124'
port = 2159
buffsize = 1024
s = socket(AF_INET, SOCK_STREAM)
s.bind((address, port))
s.listen(100)  # max number of queued connections
conn_list = []
conn_dt = {}
requeststack = {}
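# A sketch of the JSON messages this server exchanges, reconstructed from the
# handler below (the field values here are illustrative assumptions, not taken
# from a real session):
#
#   clientA job request: {"module": "clientA", "method": "requestA", "token": "job-1",
#                         "srcFile": "src.wav", "testFile": "test.wav", "samplerate": 16000}
#   clientB job poll:    {"module": "clientB", "method": "requestB"}
#   clientB result:      {"module": "clientB", "method": "response", "token": "job-1",
#                         "result": "..."}
#
# requeststack maps token -> [status (0 pending / 1 taken), result, [srcFile, testFile, samplerate]]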
def tcplink(sock, addr):
while True:
try:
            recvdata = sock.recv(buffsize).decode('utf-8')
curdic = json.loads(recvdata)
if curdic['module'] == 'clientA' and curdic['method'] == 'requestA':
requeststack[curdic['token']] = [0,'',[curdic['srcFile'],curdic['testFile'],curdic['samplerate']]]
while True:
if requeststack[curdic['token']][1] != '':
oneresult = requeststack.pop(curdic['token'])[1]
curdic['result'] = oneresult
sock.send(bytes(json.dumps(curdic), encoding='utf-8'))
os.system("rm -rf /home/netease/polqa/" + curdic['token'])
break
if curdic['module'] == 'clientB' and curdic['method'] == 'requestB':
if len(requeststack) != 0:
for onekey in requeststack:
if requeststack[onekey][0] == 0:
requeststack[onekey][0] = 1
curdic['token'] = onekey
curdic['job'] = 'occupy'
curdic['srcFile'] = requeststack[onekey][2][0]
curdic['testFile'] = requeststack[onekey][2][1]
curdic['samplerate'] = requeststack[onekey][2][2]
sock.send(bytes(json.dumps(curdic), encoding='utf-8'))
break
else:
curdic['job'] = None
sock.send(bytes(json.dumps(curdic),encoding='utf-8'))
else:
curdic['job'] = None
sock.send(bytes(json.dumps(curdic),encoding='utf-8'))
if curdic['module'] == 'clientB' and curdic['method'] == 'response':
requeststack[curdic['token']][1] = curdic['result']
sock.send(bytes(json.dumps(curdic),encoding='utf-8'))
if not recvdata:
break
        except Exception:
            sock.close()
            print(addr, 'offline')
            _index = conn_list.index(addr)
            conn_dt.pop(addr)
            conn_list.pop(_index)
break
def recs():
    while True:
        clientsock, clientaddress = s.accept()
        if clientaddress not in conn_list:
            conn_list.append(clientaddress)
            conn_dt[clientaddress] = clientsock
            print('connect from:', clientaddress)
        # handle each connection in its own thread
        t = threading.Thread(target=tcplink, args=(clientsock, clientaddress))
        t.start()
if __name__ == '__main__':
t1 = threading.Thread(target=recs, args=(), name='rec')
t1.start()
|
AlgorithmLib
|
/AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/POLQA/file_server.py
|
file_server.py
|
import time
import copy
import sys,os
from os import path
sys.path.append(os.path.dirname(path.dirname(__file__)))
from commFunction import global_result,sftp_connect,sftp_get,sftp_disconnect,constMosResult
import shutil
from socketClient import SocketClient
from POLQA import startvqt
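# Worker loop sketch: this client repeatedly polls the polqa_server for pending
# jobs ('requestB'), fetches the job folder over SFTP, runs the POLQA
# measurement via startvqt, and posts the MOS result back ('response').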
def exec_polqa_test():
    while True:
        try:
            socket = SocketClient(global_result.machost, global_result.PORT)
            curdata = global_result.get_data()
            curdata['module'] = 'clientB'
            curdata['method'] = 'requestB'
            curruslt = socket.sender(curdata)
        except Exception:
            socket.close()
            continue
        if curruslt['job'] is None or str(curruslt['job']) == 'null':
            continue
        print('processing')
        # check the input files
        # connect to sftp and fetch the job folder
        client, sftp = sftp_connect(global_result.username, global_result.password, global_result.HOST, port=global_result.sftpPort)
        sftp_get(sftp, '/home/netease/polqa/' + curruslt['token'], '')
        sftp_disconnect(client)
        srf, tsf, fpath, sr = curruslt['token'] + '/' + curruslt['srcFile'], curruslt['token'] + '/' + curruslt['testFile'], curruslt['token'], curruslt['samplerate']
if not os.path.exists(srf) or not os.path.exists(tsf):
curruslt['result'] = 'lack of input files!'
socket = SocketClient(global_result.machost, global_result.PORT)
curruslt['module'] = 'clientB'
curruslt['method'] = 'response'
            try:
                douresult = socket.sender(curruslt)
            except Exception:
                socket.close()
            shutil.rmtree(fpath)
            print(fpath, 'does not exist')
            continue
global_result.mosResult = copy.deepcopy(constMosResult)
startvqt(os.path.abspath(srf), os.path.abspath(tsf), sr)
        if global_result.mosResult['mos'] == '-0.0':  # '-0.0' appears to be a sentinel for an unfinished measurement
            time.sleep(5)
curruslt['result'] = global_result.mosResult
socket = SocketClient(global_result.machost, global_result.PORT)
curruslt['module'] = 'clientB'
curruslt['method'] = 'response'
print(curruslt)
        try:
            socket.sender(curruslt)
        except Exception:
            socket.close()
        time.sleep(1)
        shutil.rmtree(fpath)
        print(fpath, 'deleted')
if __name__ == '__main__':
    exec_polqa_test()  # 0: start from the beginning; -1: start from the end
|
AlgorithmLib
|
/AlgorithmLib-4.0.3.tar.gz/AlgorithmLib-4.0.3/algorithmLib/POLQA/polqa_server.py
|
polqa_server.py
|