code
stringlengths 501
5.19M
| package
stringlengths 2
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
/* html5shiv v3.7.3-pre (minified, vendored third-party file — do not hand-edit).
   Enables styling and creation of HTML5 elements in legacy IE by shivving
   document.createElement / createDocumentFragment and injecting default CSS. */
!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x<style>"+b+"</style>",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="<xyz></xyz>",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var 
a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document);
|
Appium-Python-Client
|
/Appium-Python-Client-2.11.1.tar.gz/Appium-Python-Client-2.11.1/docs/_build/html/_static/js/html5shiv.min.js
|
html5shiv.min.js
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import namedtuple
import time
from appium import webdriver
from selenium.webdriver.common.by import By
from automation.mobile.platforms import Platform
from automation.mobile.uicomponents import UIComponents
import os
# Returns abs path relative to this file and not cwd
def PATH(p):
    """Return the absolute path of *p*, resolved relative to this file's directory (not the CWD)."""
    # def instead of a name-bound lambda (PEP 8 E731); behavior is identical.
    return os.path.abspath(os.path.join(os.path.dirname(__file__), p))
def collect_prerequisites():
    """
    Collect all prerequisites required to run the UIAutomation test case.

    Prompts the user for a target platform (1 = iOS, 2 = Android) and the
    path to the app bundle; exits the process with status 1 on invalid input.

    :return: dict with keys ``platform`` (int) and ``app_bundle`` (str)
    """
    print("1 - iOS")
    print("2 - Android")
    platform = int(input("Choose platform:"))
    # BUG FIX: the original condition `1 < platform > 2` is a chained
    # comparison meaning `(1 < platform) and (platform > 2)`, so 0 and
    # negative values were accepted as valid platforms.
    if platform < 1 or platform > 2:
        print('You have selected an invalid platform')
        exit(1)
    app_bundle = input("Enter path to app bundle:")
    # Empty string is falsy; no need for __len__() == 0.
    if not app_bundle:
        print('Can not test an app which does not exist ;)')
        exit(1)
    return {
        "platform": platform,
        "app_bundle": app_bundle
    }
class MobDriver(webdriver.Remote):
    """
    Appium Remote driver wrapper with platform-aware helpers for finding
    alerts and widgets on iOS and Android.
    """

    def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
                 desired_capabilities=None, browser_profile=None, proxy=None, keep_alive=False):
        """
        Create the underlying Appium session and record the target platform.

        :param command_executor: URL of the Appium/Selenium server.
        :param desired_capabilities: capability dict; must contain 'platformName'.
        :param browser_profile: forwarded to webdriver.Remote.
        :param proxy: forwarded to webdriver.Remote.
        :param keep_alive: forwarded to webdriver.Remote.
        """
        super(MobDriver, self).__init__(command_executor, desired_capabilities, browser_profile, proxy,
                                        keep_alive)
        # Keep a reference to the capabilities so the helpers can branch on platform.
        self.desired_caps = desired_capabilities
        # Identify the platform once, up front.
        platform_name = self.desired_caps['platformName']
        if platform_name == "Android":
            self.platform = Platform.ANDROID
        elif platform_name == "iOS":
            self.platform = Platform.IOS
        else:
            self.platform = Platform.UNKNOWN

    def find_alert(self):
        """
        Find the alert on screen and return it as named tuple 'Alert(alert, text, btns)'.
        This is just a temporary workaround till Appium solves the switch_to.alert issue.

        :return: Alert namedtuple, or None when the platform is unknown.
        """
        Alert = namedtuple('Alert', ['alert', 'text', 'btns'])
        if self.platform == Platform.IOS:
            alert_xpath = UIComponents.ALERT.iOS
            label_xpath = UIComponents.LABEL.iOS
            button_xpath = UIComponents.BUTTON.iOS
        elif self.platform == Platform.ANDROID:
            alert_xpath = UIComponents.ALERT.Android
            label_xpath = UIComponents.LABEL.Android
            button_xpath = UIComponents.BUTTON.Android
        else:
            # BUG FIX: the original left alrt/text/btns unbound for unknown
            # platforms and crashed with NameError; check_alert expects None.
            return None
        alrt = self.find_element_by_xpath(alert_xpath)
        text = alrt.find_element_by_xpath(label_xpath.format('@*')).text
        btns = alrt.find_elements_by_xpath(button_xpath.format('@*'))
        return Alert(alert=alrt, text=text, btns=btns)

    def check_alert(self, msg=None, btn_index=0):
        """
        Check for an alert, dismiss it, and optionally compare the message.

        :param msg: If provided, will be compared with alert message. Result is False if msg does not match.
        :param btn_index: Index of the alert button clicked to dismiss the alert.
        :return: named tuple 'Result' with two fields, 'result': bool and 'msg': str.
        """
        time.sleep(2)  # crude wait for the alert to appear
        Result = namedtuple('Result', ['result', 'msg'])
        try:
            alert = self.find_alert()
            if alert is None:
                return Result(result=False, msg='No alert found')
            # BUG FIX: the original clicked the button a second time when no
            # msg was supplied; dismiss the alert exactly once.
            alert.btns[btn_index].click()
            if msg is not None:
                success = (alert.text == msg)
                return Result(result=success, msg='Wrong alert msg')
            return Result(result=True, msg='No alert msg')
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
            return Result(result=False, msg='There is some error')

    def find_by_name(self, widget_type, name):
        """
        :param name: Name of the component
        :param widget_type: Type of widget
        :return: returns labeled element of type
        :raises ValueError: if the platform is unknown.
        """
        time.sleep(2)  # crude wait for the screen to settle
        if self.platform == Platform.IOS:
            lookup_xpath = widget_type.iOS.format("@hint='{0}' or @value='{0}' or @label='{0}' or @name='{0}'")
        elif self.platform == Platform.ANDROID:
            lookup_xpath = widget_type.Android.format("@text='{0}'")
        else:
            # BUG FIX: lookup_xpath was unbound (NameError) for unknown platforms.
            raise ValueError('Unsupported platform: {}'.format(self.platform))
        return self.find_element_by_xpath(lookup_xpath.format(name))

    def find_by_index(self, widget_type, index):
        """
        :param index: Index of component to return from list of found elements
        :param widget_type: Type of widget
        :return: returns element of type at index
        :raises ValueError: if the platform is unknown.
        """
        time.sleep(2)  # crude wait for the screen to settle
        if self.platform == Platform.IOS:
            lookup_xpath = widget_type.iOS
        elif self.platform == Platform.ANDROID:
            lookup_xpath = widget_type.Android
        else:
            # BUG FIX: lookup_xpath was unbound (NameError) for unknown platforms.
            raise ValueError('Unsupported platform: {}'.format(self.platform))
        return self.find_element_by_xpath(lookup_xpath.format(index))
|
Appium-UIAutomation
|
/Appium-UIAutomation-0.1.0.tar.gz/Appium-UIAutomation-0.1.0/automation/mobile/mobdriver.py
|
mobdriver.py
|
[GitHub](https://github.com/shigebeyond/AppiumBoot) | [Gitee](https://gitee.com/shigebeyond/AppiumBoot)
[English document](blob/master/README.en.md)
# AppiumBoot - yaml驱动Appium测试
## 概述
Appium是移动端的自动化测试工具,但是要写python代码;
考虑到部分测试伙伴python能力不足,因此扩展Appium,支持通过yaml配置测试步骤;
框架通过编写简单的yaml, 就可以执行一系列复杂的 App 操作步骤, 如点击/输入/拖拽/上下滑/左右滑/放大缩小/提取变量/打印变量等,极大的简化了伙伴编写自动化测试脚本的工作量与工作难度,大幅提高人效;
框架通过提供类似python`for`/`if`/`break`语义的步骤动作,赋予伙伴极大的开发能力与灵活性,能适用于广泛的测试场景。
框架提供`include`机制,用来加载并执行其他的步骤yaml,一方面是功能解耦,方便分工,一方面是功能复用,提高效率与质量,从而推进测试整体的工程化。
## 特性
1. 基于 Appium 的webdriver
2. 支持通过yaml来配置执行的步骤,简化了自动化测试开发:
每个步骤可以有多个动作,但单个步骤中动作名不能相同(yaml语法要求);
动作代表webdriver上的一种操作,如tap/swipe/scroll等等;
3. 支持复杂的手势: 拖拽/上下滑/左右滑/放大缩小/多个点组成的移动轨迹等;
4. 支持提取器
5. 支持校验器
6. 支持识别验证码(使用有道ocr)
7. 支持类似python`for`/`if`/`break`语义的步骤动作,灵活适应各种场景
8. 支持`include`引用其他的yaml配置文件,以便解耦与复用
## todo
1. 支持更多的动作
## 安装
```
pip3 install AppiumBoot
```
安装后会生成命令`AppiumBoot`;
注: 对于深度deepin-linux系统,生成的命令放在目录`~/.local/bin`,建议将该目录添加到环境变量`PATH`中,如
```
export PATH="$PATH:/home/shi/.local/bin"
```
## 使用
1. 先启动 appium
2. 修改配置文件(yml)中的 `init_driver` 动作的参数, 如平台、app包等
3. 使用
```
# 1 执行单个文件
AppiumBoot 步骤配置文件.yml
# 2 执行多个文件
AppiumBoot 步骤配置文件1.yml 步骤配置文件2.yml ...
# 3 执行单个目录, 即执行该目录下所有的yml文件
AppiumBoot 步骤配置目录
# 4 执行单个目录下的指定模式的文件
AppiumBoot 步骤配置目录/step-*.yml
```
- 如执行 `AppiumBoot example/step-material.yml`:
你需要先安装[android material组件demo app](https://gitee.com/lizhenghaodamowang/material-components-android);
效果见[演示视频](https://www.zhihu.com/zvideo/1542517089130147840);
输出如下:
```
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/step-material.yml
handle action: init_driver={'executor': 'http://localhost:4723/wd/hub', 'desired_caps': {'platformName': 'Android', 'platformVersion': '9', 'deviceName': 'f978cc97', 'appPackage': 'io.material.catalog', 'appActy': 'io.material.catalog.main.MainActivity', 'automationName': 'UiAutomator2', 'noReset': True}}
handle action: include=material/comp1.yml
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/material/comp1.yml
handle action: click_by={'xpath': '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/aid.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[1]/android.widget.LinearLayout'}
handle action: sleep=1
handle action: click_by={'id': 'io.material.catalog:id/cat_demo_landing_row_root'}
handle action: swipe_up=None
handle action: sleep=1
handle action: swipe_down=None
handle action: sleep=1
handle action: click_by={'id': 'io.material.catalog:id/end'}
handle action: sleep=2
handle action: click_by={'id': 'io.material.catalog:id/center'}
handle action: sleep=2
handle action: click_by={'id': 'io.material.catalog:id/attach_toggle'}
handle action: sleep=2
handle action: click_by={'id': 'io.material.catalog:id/center'}
handle action: include=material/back.yml
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/material/back.yml
handle action: sleep=1
handle action: back=None
handle action: sleep=1
handle action: back=None
handle action: include=material/comp2.yml
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/material/comp2.yml
handle action: click_by={'xpath': '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/aid.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[2]/android.widget.LinearLayout'}
handle action: sleep=1
handle action: click_by={'xpath': '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/aid.view.ViewGroup/android.widget.ScrollView/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout'}
handle action: click_by={'aid': 'Page 2'}
handle action: click_by={'aid': 'Theme Switcher'}
handle action: sleep=1
handle action: click_by={'xpath': '(//android.widget.RadioButton[@content-desc="Green"])[1]'}
handle action: sleep=1
handle action: click_by={'id': 'io.material.catalog:id/apply_button'}
handle action: sleep=1
handle action: click_by={'id': 'io.material.catalog:id/add_button'}
handle action: sleep=1
handle action: click_by={'id': 'io.material.catalog:id/remove_button'}
handle action: sleep=2
handle action: back=None
handle action: sleep=1
handle action: click_by={'xpath': '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/aid.view.ViewGroup/android.widget.ScrollView/android.widget.LinearLayout/android.widget.LinearLayout/android.widget.LinearLayout/android.widget.RelativeLayout[3]'}
handle action: sleep=1
handle action: click_by={'aid': 'Alarm'}
handle action: sleep=1
handle action: click_by={'aid': 'Clock'}
handle action: sleep=1
handle action: click_by={'aid': 'Timer'}
handle action: sleep=1
handle action: click_by={'aid': 'Stopwatch'}
handle action: include=material/back.yml
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/material/back.yml
handle action: sleep=1
handle action: back=None
handle action: sleep=1
handle action: back=None
handle action: include=material/comp3.yml
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/material/comp3.yml
handle action: click_by={'xpath': '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/aid.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[3]/android.widget.LinearLayout'}
handle action: sleep=1
handle action: click_by={'id': 'io.material.catalog:id/cat_demo_landing_row_root'}
handle action: print=非全屏的上拉
非全屏的上拉
handle action: sleep=1
handle action: swipe_up=None
handle action: sleep=1
handle action: swipe_vertical=0.55,0.8
handle action: sleep=1
handle action: click_by={'id': 'io.material.catalog:id/cat_fullscreen_switch'}
handle action: sleep=1
handle action: print=全屏的上拉
全屏的上拉
handle action: swipe_up=None
handle action: sleep=1
handle action: swipe_down=None
handle action: include=material/back.yml
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/material/back.yml
handle action: sleep=1
handle action: back=None
handle action: sleep=1
handle action: back=None
handle action: include=material/comp4.yml
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/material/comp4.yml
handle action: click_by={'xpath': '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/aid.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[4]/android.widget.LinearLayout'}
handle action: sleep=1
handle action: click_by={'xpath': '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/aid.view.ViewGroup/android.widget.ScrollView/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout'}
handle action: click_by={'aid': 'Theme Switcher'}
handle action: sleep=1
handle action: click_by={'xpath': '(//android.widget.RadioButton[@content-desc="Yellow"])[1]'}
handle action: sleep=1
handle action: click_by={'id': 'io.material.catalog:id/apply_button'}
handle action: click_by={'id': 'io.material.catalog:id/material_button'}
handle action: sleep=1
handle action: back=None
......
```
命令会自动打开[android material组件demo app](https://gitee.com/lizhenghaodamowang/material-components-android),并按照步骤配置文件的描述来执行动作,如下拉、上拉、左滑、点击按钮等,一个个组件页面去操作
- 如执行 `AppiumBoot example/step-zhs.yml`:
你要先安装众划算app;
输出如下:
```
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/step-zhs.yml
handle action: init_driver={'executor': 'http://localhost:4723/wd/hub', 'desired_caps': {'platformName': 'Android', 'platformVersion': '9', 'deviceName': 'f978cc97', 'appPackage': 'com.zhs.zhonghuasuanapp', 'apivity': 'com.zhs.activity.StartActivity', 'automationName': 'UiAutomator2', 'noReset': True}}
handle action: sleep=7
handle action: click_by_if_exist={'id': 'com.zhs.zhonghuasuanapp:id/img_start'}
handle action: start_recording_screen=None
handle action: swipe_up=None
handle action: include=zhs/login.yml
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/zhs/login.yml
handle action: click_by={'id': 'com.zhs.zhonghuasuanapp:id/tab_my_image'}
handle action: sleep=2
handle action: once=[{'moveon_if': "boot.exist_by('id', 'com.zhs.zhonghuasuanapp:id/tv_account_login')", 'click_by': {'id': 'com.zhs.zhonghuasuanapp:id/tv_account_login'}, 'sleep': 2}, {'input_by_id': {'com.zhsnghuasuanapp:id/edit_login_username': 'shigebeyond', 'com.zhs.zhonghuasuanapp:id/edit_login_password': 'shige123'}}, {'click_by': {'id': 'com.zhs.zhonghuasuanapp:id/box_user_agreement'}}, {'click_by': {'id': 'com.zhs.zhonghuasuanapp:id/login_submit'}, 'sleep': 4}, {'click_by_if_exist': {'id': 'com.zhs.zhonghuasuanapp:id/btn_i_know'}}, {'click_by_if_exist': {'id': 'com.zhs.zhonghuasuanapp:id/tv_hid_guid'}}]
-- For loop start: for(1) --
第1次迭代
handle action: moveon_if=boot.exist_by('id', 'com.zhs.zhonghuasuanapp:id/tv_account_login')
-- For loop break: for(1), break condition: not (boot.exist_by('id', 'com.zhs.zhonghuasuanapp:id/tv_account_login')) --
handle action: sleep=2
handle action: include=zhs/apply.yml
Load and run step file: /ohome/shi/code/python/AppiumBoot/example/zhs/apply.yml
handle action: click_by={'id': 'com.zhs.zhonghuasuanapp:id/tab_new_image'}
handle action: sleep=4
handle action: swipe_up=None
handle action: sleep=2
handle action: swipe_down=None
......
```
命令会自动打开众划算app,并按照步骤配置文件的描述来执行动作,如下拉、上拉、左滑、点击按钮等
## 步骤配置文件及demo
用于指定多个步骤, 示例见源码 [example](https://github.com/shigebeyond/AppiumBoot/tree/main/example) 目录下的文件;
顶级的元素是步骤;
每个步骤里有多个动作(如sleep),如果动作有重名,就另外新开一个步骤写动作,这是由yaml语法限制导致的,但不影响步骤执行。
[demo](https://github.com/shigebeyond/AppiumBoot/blob/main/example/)
[demo视频](https://www.zhihu.com/zvideo/1542517089130147840)
## 查找元素的方法
1. id: 根据 id 属性值来查找, 对应`By.ID`
2. sid: 根据 accessibility_id 属性值来查找, 对应`By.ACCESSIBILITY_ID`
3. class: 根据类名来查找, 对应`By.CLASS_NAME`
4. xpath: 根据 xpath 来查找, 对应`By.XPATH`
## 配置详解
支持通过yaml来配置执行的步骤;
每个步骤可以有多个动作,但单个步骤中动作名不能相同(yaml语法要求);
动作代表webdriver上的一种操作,如tap/swipe/scroll等等;
下面详细介绍每个动作:
1. init_driver: 初始化driver
```yaml
init_driver:
executor: http://localhost:4723/wd/hub
desired_caps:
platformName: Android
platformVersion: '9'
deviceName: f978cc97
appPackage: io.material.catalog
appActivity: io.material.catalog.main.MainActivity
automationName: UiAutomator2
noReset: true
```
2. close_driver: 关闭driver
```yaml
close_driver:
```
3. sleep: 线程睡眠;
```yaml
sleep: 2 # 线程睡眠2秒
```
4. print: 打印, 支持输出变量/函数;
```yaml
# 调试打印
print: "总申请数=${dyn_data.total_apply}, 剩余份数=${dyn_data.quantity_remain}"
```
变量格式:
```
$msg 一级变量, 以$为前缀
${data.msg} 多级变量, 用 ${ 与 } 包含
```
函数格式:
```
${random_str(6)} 支持调用函数,目前仅支持以下几个函数: random_str/random_int/random_element/incr
```
函数罗列:
```
random_str(n): 随机字符串,参数n是字符个数
random_int(n): 随机数字,参数n是数字个数
random_element(var): 从list中随机挑选一个元素,参数var是list类型的变量名
incr(key): 自增值,从1开始,参数key表示不同的自增值,不同key会独立自增
```
5. input_by_id: 填充 id 指定的输入框;
```yaml
input_by_id:
# 输入框id: 填充的值(支持写变量)
'io.material.catalog:id/cat_demo_input': '18877310999'
```
6. input_by_aid: 填充 accessibility_id 指定的输入框;
```yaml
input_by_aid:
# 输入框accessibility_id: 填充的值(支持写变量)
'Input name': '18877310999'
```
7. input_by_class: 填充 指定类名的输入框;
```yaml
input_by_class:
# 输入框类名: 填充的值(支持写变量)
'android.widget.EditText': '18877310999'
```
8. input_by_xpath: 填充 xpath 指定的输入框;
```yaml
input_by_xpath:
# 输入框xpath路径: 填充的值(支持写变量)
'/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.ScrollView/android.widget.LinearLayout/android.widget.LinearLayout[1]/android.widget.FrameLayout/android.widget.EditText': aaa
```
9. hide_keyboard: 隐藏键盘
```yaml
hide_keyboard:
```
10. swipe: 屏幕横扫(传坐标)
```yaml
swipe:
from: 100,100 # 起点坐标
to: 200,200 # 终点坐标
duration: 2 # 耗时秒数, 可省
```
11. swipe_up: 上滑(传比例)
```yaml
swipe_up: 0.55 # 移动幅度比例(占屏幕高度的比例)
swipe_up: # 默认移动幅度比例为0.5
```
12. swipe_down: 下滑(传比例)
```yaml
swipe_down: 0.55 # 移动幅度比例(占屏幕高度的比例)
swipe_down: # 默认移动幅度比例为0.5
```
13. swipe_left: 左滑(传y坐标)
```yaml
swipe_left: 100 # y坐标
swipe_left: # 默认y坐标为中间
```
14. swipe_right: 右滑(传y坐标)
```yaml
swipe_right: 100 # y坐标
swipe_right: # 默认y坐标为中间
```
15. swipe_vertical: 垂直方向(上下)滑动(传比例)
```yaml
swipe_vertical: 0.2,0.7 # y轴起点/终点位置在屏幕的比例,如 0.2,0.7,即y轴上从屏幕0.2比例处滑到0.7比例处
```
16. swipe_horizontal: 水平方向(左右)滑动(传比例)
```yaml
swipe_horizontal: 0.2,0.7 # x轴起点/终点位置在屏幕的比例,如 0.2,0.7,即x轴上从屏幕0.2比例处滑到0.7比例处
```
17. move_track: 移动轨迹(传坐标序列)
```yaml
move_track: '800,1600;100,1600;100,600;800,600;360,600;360,1100' # 坐标序列,坐标之间使用;分割,如x1,y1;x2,y2
```
18. drag_and_drop_by: 拖拽(传元素): 从一个元素滑动到另一个元素,第二个元素替代第一个元素原本屏幕上的位置
```yaml
drag_and_drop_by:
by: xpath # 元素查找方式: id/sid/class/xpath
from: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[4]/android.widget.LinearLayout
to: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[2]/android.widget.LinearLayout
```
19. scroll_by: 滚动(传元素): 从一个元素滚动到另一元素,直到页面自动停止(有惯性)
```yaml
scroll_by:
by: xpath # 元素查找方式: id/sid/class/xpath
from: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[4]/android.widget.LinearLayout
to: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[2]/android.widget.LinearLayout
```
20. move_by: 移动(传元素): 从一个元素移动到另一元素,无惯性
```yaml
move_by:
by: xpath # 元素查找方式: id/sid/class/xpath
from: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[4]/android.widget.LinearLayout
to: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[2]/android.widget.LinearLayout
```
21. zoom_in: 放大
```yaml
zoom_in:
```
22. zoom_out: 缩小
```yaml
zoom_out:
```
23. tap: 敲击屏幕(传坐标)
```yaml
tap: 200,200
```
24. tap_by: 敲击元素
```yaml
tap_by:
# 元素查找方式(id/sid/class/xpath) : 查找的值
#id: io.material.catalog:id/cat_demo_landing_row_root
xpath: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[1]/android.widget.LinearLayout # 按钮的xpath路径
# 耗时秒数, 可省, 可用于模拟长按
duration: 10
```
25. click_by/click_by_if_exist: 点击元素;
```yaml
click_by:
# 元素查找方式(id/sid/class/xpath) : 查找的值
#id: io.material.catalog:id/cat_demo_landing_row_root
xpath: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[1]/android.widget.LinearLayout # 按钮的xpath路径
```
如果点击之前要先判断元素是否存在,则换用 click_by_if_exist
26. shake: 摇一摇
```yaml
shake:
```
27. set_orientation: 设置屏幕方向
```yaml
set_orientation: true # 是否竖屏, 否则横屏
```
28. set_location: 设置地理位置
```yaml
set_location: 49,123 # 纬度,经度
set_location: 49,123,10 # 纬度,经度,海拔高度
```
29. screenshot: 整个窗口截图存为png;
```yaml
screenshot:
save_dir: downloads # 保存的目录,默认为 downloads
save_file: test.png # 保存的文件名,默认为:时间戳.png
```
30. screenshot_element_by: 对某个标签截图存为png;
```yaml
screenshot_element_by:
# 元素查找方式(id/sid/class/xpath) : 查找的值
#id: io.material.catalog:id/cat_demo_landing_row_root
xpath: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[1]/android.widget.LinearLayout
save_dir: downloads # 保存的目录,默认为 downloads
save_file: test.png # 保存的文件名,默认为:时间戳.png
```
31. execute_js: 执行js;
```yaml
execute_js: alert('hello world')
```
32. back: 返回键;
```yaml
back:
```
33. keyevent: 模拟系统键;
```yaml
keyevent: '4'
```
34. open_notifications: 打开手机的通知栏;
```yaml
open_notifications:
```
35. get_clipboard: 读剪切板内容;
```yaml
get_clipboard: name # 参数为记录剪切板内容的变量名
```
36. set_clipboard: 写剪切板内容;
```yaml
set_clipboard: hello world $name # 参数是写入内容,可带参数
```
37. push_file:推文件到手机上, 即写手机上文件;
```yaml
push_file:
to: /storage/emulated/0/documents/test/a.txt # 写入的手机上的文件
content: helloworld # 写入的内容, content与to只能二选一
#from: a.txt # 写入内容的本地来源文件, content与to只能二选一
```
38. pull_file:从手机中拉文件, 即读手机上的文件;
```yaml
pull_file:
from: /storage/emulated/0/documents/test/a.txt # 读取的手机上的文件
to: a.txt # 写入的本地文件, 可省
var: content # 记录文件内容的变量, 可省
print: $content
```
39. send_sms:发送短信;
```yaml
send_sms:
phone: 13475556022
content: hello $name
```
40. print_performance:打印性能信息;
```yaml
print_performance:
```
41. start_recording_screen:开始录屏;
start_recording_screen 与 stop_recording_screen 配合使用(start在前,stop在后)
```yaml
start_recording_screen:
```
42. stop_recording_screen:结束录屏,并存为视频文件;
start_recording_screen 与 stop_recording_screen 配合使用(start在前,stop在后), 如果两者之间的执行发生异常, 则系统会主动调用后续第一个stop_recording_screen动作, 以便记录好异常的全过程
```yaml
stop_recording_screen: # 默认视频文件路径为 `record-时间.mp4`
stop_recording_screen: a.mp4 # 视频文件路径
```
43. alert_accept: 点击弹框的确定按钮, 如授权弹框的允许;
```yaml
alert_accept:
```
44. alert_dismiss: 取消弹框, 如授权弹框的禁止;
```yaml
alert_dismiss:
```
45. for: 循环;
for动作下包含一系列子步骤,表示循环执行这系列子步骤;变量`for_i`记录是第几次迭代(从1开始),变量`for_v`记录是每次迭代的元素值(仅当是list类型的变量迭代时有效)
```yaml
# 循环3次
for(3) :
# 每次迭代要执行的子步骤
- swipe_down:
sleep: 2
# 循环list类型的变量values
for(values) :
# 每次迭代要执行的子步骤
- swipe_down:
sleep: 2
# 无限循环,直到遇到跳出动作
# 有变量for_i记录是第几次迭代(从1开始)
for:
# 每次迭代要执行的子步骤
- break_if: for_i>2 # 满足条件则跳出循环
swipe_down:
sleep: 2
```
46. once: 只执行一次,等价于 `for(1)`;
once 结合 moveon_if,可以模拟 python 的 `if` 语法效果
```yaml
once:
# 每次迭代要执行的子步骤
- moveon_if: for_i<=2 # 满足条件则往下走,否则跳出循环
swipe_down:
sleep: 2
```
47. break_if: 满足条件则跳出循环;
只能定义在for/once循环的子步骤中
```yaml
break_if: for_i>2 # 条件表达式,python语法
```
48. moveon_if: 满足条件则往下走,否则跳出循环;
只能定义在for/once循环的子步骤中
```yaml
moveon_if: for_i<=2 # 条件表达式,python语法
```
49. moveon_if_exist_by: 如果检查元素存在 则往下走,否则跳出循环;
只能定义在for/once循环的子步骤中
```yaml
moveon_if_exist_by:
id: com.shikee.shikeeapp:id/button1
```
50. break_if_exist_by: 如果检查元素存在 则跳出循环,否则往下走;
只能定义在for/once循环的子步骤中
```yaml
break_if_exist_by:
id: button1
```
51. include: 包含其他步骤文件,如记录公共的步骤,或记录配置数据(如用户名密码);
```yaml
include: part-common.yml
```
52. set_vars: 设置变量;
```yaml
set_vars:
name: shi
password: 123456
birthday: 5-27
```
53. print_vars: 打印所有变量;
```yaml
print_vars:
```
54. base_url: 设置基础url
```yaml
base_url: https://www.taobao.com/
```
55. get: 发get请求, 但无跳转;
```yaml
get:
url: $dyn_data_url # url,支持写变量
extract_by_eval:
dyn_data: "json.loads(response.text[16:-1])" # 变量response是响应对象
```
56. post: 发post请求, 但无跳转;
```yaml
post:
url: http://admin.jym1.com/store/add_store # url,支持写变量
is_ajax: true
data: # post的参数
# 参数名:参数值
store_name: teststore-${random_str(6)}
store_logo_url: '$img'
```
57. upload: 上传文件;
```yaml
upload: # 上传文件/图片
url: http://admin.jym1.com/upload/common_upload_img/store_img
files: # 上传的多个文件
# 参数名:文件本地路径
file: /home/shi/fruit.jpeg
extract_by_jsonpath:
img: $.data.url
```
58. download: 下载文件;
变量`download_file`记录最新下载的单个文件
```yaml
download:
url: https://img.alicdn.com/tfscom/TB1t84NPuL2gK0jSZPhXXahvXXa.jpg_q90.jpg
save_dir: downloads # 保存的目录,默认为 downloads
save_file: test.jpg # 保存的文件名,默认为url中最后一级的文件名
```
59. recognize_captcha: 识别验证码;
参数同 `download` 动作, 因为内部就是调用 `download`;
而变量`captcha`记录识别出来的验证码
```
recognize_captcha:
url: http://admin.jym1.com/login/verify_image
# save_dir: downloads # 保存的目录,默认为 downloads
# save_file: test.jpg # 保存的文件名,默认为url中最后一级的文件名
```
60. recognize_captcha_element: 识别验证码标签中的验证码;
参数同 `screenshot_element_by` 动作, 因为内部就是调用 `screenshot_element_by`;
而变量`captcha`记录识别出来的验证码
```
recognize_captcha_element:
# 元素查找方式(id/sid/class/xpath) : 查找的值
#id: io.material.catalog:id/cat_demo_landing_row_root
xpath: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[1]/android.widget.LinearLayout
#save_dir: downloads # 保存的目录,默认为 downloads
#save_file: test.jpg # 保存的文件名,默认为url中最后一级的文件名
```
61. exec: 执行命令, 可用于执行 HttpBoot/SeleniumBoot/AppiumBoot/MiniumBoot 等命令,以便打通多端的用例流程
```yaml
exec: ls
exec: SeleniumBoot test.yml
```
## 校验器
主要是为了校验页面或响应的内容, 根据不同场景有2种写法
```
1. 针对当前页面, 那么校验器作为普通动作来写
2. 针对 get/post/upload 有发送http请求的动作, 那么校验器在动作内作为普通属性来写
```
不同校验器适用于不同场景
| 校验器 | 当前页面场景 | http请求场景 |
| ------------ | ------------ | ------------ |
| validate_by_id | Y | N |
| validate_by_aid | Y | N |
| validate_by_class | Y | N |
| validate_by_xpath | Y | Y |
| validate_by_css | N | Y |
| validate_by_jsonpath | N | Y |
1. validate_by_id:
从当前页面中校验 id 对应的元素的值
```yaml
validate_by_id:
"io.material.catalog:id/cat_demo_text": # 元素的id
'=': 'Hello world' # 校验符号或函数: 校验的值
```
2. validate_by_aid:
从当前页面中校验 accessibility_id 对应的元素的值
```yaml
validate_by_aid:
"Timer": # 元素的accessibility_id
'>': '2022-07-06 12:00:00' # 校验符号或函数: 校验的值
```
3. validate_by_class:
从当前页面中校验类名对应的元素的值
```yaml
validate_by_class:
"android.widget.TextView": # 元素的类名
'=': 'Hello world' # 校验符号或函数: 校验的值
```
4. validate_by_xpath:
从当前页面或html响应中校验 xpath 路径对应的元素的值
```yaml
validate_by_xpath:
"/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[1]/android.widget.LinearLayout": # 元素的xpath路径
'>': 0 # 校验符号或函数: 校验的值, 即 id 元素的值>0
"/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[2]/android.widget.LinearLayout":
contains: 衬衫 # 即 title 元素的值包含'衬衫'
```
5. validate_by_css:
从html响应中校验类名对应的元素的值
```yaml
validate_by_css:
'#id': # 元素的css selector 模式
'>': 0 # 校验符号或函数: 校验的值, 即 id 元素的值>0
'#goods_title':
contains: 衬衫 # 即 title 元素的值包含'衬衫'
```
6. validate_by_jsonpath:
从json响应中校验 多层属性 的值
```yaml
validate_by_jsonpath:
'$.data.goods_id':
'>': 0 # 校验符号或函数: 校验的值, 即 id 元素的值>0
'$.data.goods_title':
contains: 衬衫 # 即 title 元素的值包含'衬衫'
```
#### 校验符号或函数
1. `=`: 相同
2. `>`: 大于
3. `<`: 小于
4. `>=`: 大于等于
5. `<=`: 小于等于
6. `contains`: 包含子串
7. `startswith`: 以子串开头
8. `endswith`: 以子串结尾
9. `regex_match`: 正则匹配
10. `exist`: 元素存在
11. `not_exist`: 元素不存在
## 提取器
主要是为了从页面或响应中提取变量, 根据不同场景有2种写法
```
1. 针对当前页面, 那么提取器作为普通动作来写
2. 针对 get/post/upload 有发送http请求的动作, 那么提取器在动作内作为普通属性来写
```
不同提取器适用于不同场景
| 提取器 | 页面场景 | http请求场景 |
| ------------ | ------------ | ------------ |
| extract_by_id | Y | N |
| extract_by_aid | Y | N |
| extract_by_class | Y | N |
| extract_by_xpath | Y | Y |
| extract_by_jsonpath | N | Y |
| extract_by_css | N | Y |
| extract_by_eval | Y | Y |
1. extract_by_id:
从当前页面中解析 id 对应的元素的值
```yaml
extract_by_id:
# 变量名: 元素id
goods_id: "io.material.catalog:id/cat_demo_text"
```
2. extract_by_aid:
从当前页面中解析 accessibility_id 对应的元素的值
```yaml
extract_by_aid:
# 变量名: 元素的accessibility_id
update_time: "Timer"
```
3. extract_by_class:
从当前页面中解析类名对应的元素的值
```yaml
extract_by_class:
# 变量名: 元素的accessibility_id
name: "android.widget.TextView"
```
4. extract_by_xpath:
从当前页面或html响应中解析 xpath 路径指定的元素的值
```yaml
extract_by_xpath:
# 变量名: xpath路径
goods_id: /hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.view.ViewGroup/androidx.recyclerview.widget.RecyclerView/android.widget.FrameLayout[1]/android.widget.LinearLayout
# 获得元素的属性
goods_img_element: /hierarchy/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.view.ViewGroup/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.LinearLayout[1]/android.widget.FrameLayout/android.widget.ScrollView/android.widget.LinearLayout/android.widget.RelativeLayout[1]/android.widget.FrameLayout/android.widget.RelativeLayout/androidx.viewpager.widget.ViewPager/android.widget.ImageView/@class
```
5. extract_by_css:
从html响应中解析 css selector 模式指定的元素的值
```yaml
extract_by_css:
# 变量名: css selector 模式
goods_id: table>tbody>tr:nth-child(1)>td:nth-child(1) # 第一行第一列
url: //*[@id="1"]/div/div/h3/a/@href # 获得<a>的href属性
```
6. extract_by_jsonpath:
从json响应中解析 多层属性 的值
```yaml
extract_by_jsonpath:
# 变量名: json响应的多层属性
img: $.data.url
```
7. extract_by_eval:
使用 `eval(表达式)` 执行表达式, 并将执行结果记录到变量中
```yaml
extract_by_eval:
# 变量名: 表达式(python语法)
dyn_data: "json.loads(response.text[16:-1])" # 变量response是响应对象
```
|
AppiumBoot
|
/AppiumBoot-1.0.5.tar.gz/AppiumBoot-1.0.5/README.md
|
README.md
|
import logging
import base64
import re
import sys
import time
import traceback
from typing import Dict, Any, Union, Tuple
# sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath( __file__)))) # The sys.path.append line adds the
# parent directory of the tests directory to the Python module search path, allowing you to import modules from the
# root folder.
from appium_extended_helpers.helpers_decorators import log_debug
class Terminal:
    """Device-level helpers built on an Appium driver's `mobile: shell` extension."""

    def __init__(self, driver, logger: logging.Logger = None, log_level: int = logging.INFO, log_path: str = ''):
        """Initialize the terminal wrapper.

        Args:
            driver: Appium WebDriver instance used for all device calls.
            logger: Optional externally configured logger; when None a
                module-named logger is created.
            log_level: Level applied to the logger.
            log_path: Optional file path for a FileHandler; '.log' is
                appended when missing.
        """
        self.driver = driver
        self.logger = logger
        if logger is None:
            # No logger supplied - fall back to a module-level logger.
            self.logger = logging.getLogger(__name__)
        # NOTE(review): source indentation was lost in extraction; assuming the
        # level/handler setup applies to whichever logger is in use - confirm
        # against the upstream package.
        self.logger.setLevel(log_level)
        if bool(log_path):
            if not log_path.endswith('.log'):
                log_path = log_path + '.log'
            file_handler = logging.FileHandler(log_path)
            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            file_handler.setFormatter(formatter)
            self.logger.addHandler(file_handler)
@log_debug()
def adb_shell(self, command: str, args: str = "") -> Any:
    """Run an adb shell command through Appium's ``mobile: shell`` extension.

    Args:
        command: Executable name, e.g. ``"input"``.
        args: Argument string, passed as a single list element.

    Returns:
        The command output from the driver, or None when a KeyError was
        caught and logged (callers must tolerate None).
    """
    try:
        return self.driver.execute_script("mobile: shell", {'command': command, 'args': [args]})
    except KeyError as e:
        # NOTE(review): only KeyError is handled here; other driver
        # exceptions propagate to the caller.
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
@log_debug()
def push(self, source: str, destination: str) -> bool:
    """Copy a local file or directory onto the device via the Appium server.

    Args:
        source: Local path of the file/directory to copy.
        destination: Target path on the device.

    Returns:
        True on success, False when an IOError occurred.
    """
    try:
        self.driver.push_file(destination_path=destination, source_path=source)
    except IOError as error:
        self.logger.error("appium_extended_terminal.push()")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def pull(self, source: str, destination: str) -> bool:
    """Pull a file from the device at ``source`` and write it to ``destination``.

    Args:
        source: Path of the file on the device.
        destination: Local path where the file should be saved.

    Returns:
        True if the file was fetched and written, False otherwise.
    """
    file_contents_base64 = self.driver.assert_extension_exists('mobile: pullFile'). \
        execute_script('mobile: pullFile', {'remotePath': source})
    if not file_contents_base64:
        return False
    try:
        decoded_contents = base64.b64decode(file_contents_base64)
        with open(destination, 'wb') as file:
            file.write(decoded_contents)
        return True
    except (IOError, ValueError) as e:
        # BUG FIX: b64decode raises binascii.Error (a ValueError subclass),
        # which the previous `except IOError` never caught - a corrupt
        # payload crashed the caller instead of returning False.
        self.logger.error("appium_extended_terminal.pull")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
@log_debug()
def start_activity(self, package: str, activity: str) -> bool:
    """Launch an activity on the connected device via `am start -n`.

    Args:
        package: Application package name.
        activity: Activity to launch.

    Returns:
        True if the launch command was issued, False on error.
    """
    try:
        self.adb_shell(command="am", args=f"start -n {package}/{activity}")
    except KeyError as error:
        self.logger.error("appium_extended_terminal.start_activity()")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def get_current_app_package(self) -> Union[str, None]:
    """Return the package name of the currently focused app, or None on error.

    Parses `dumpsys window windows` output, scanning the mCurrentFocus /
    mFocusedApp lines for a dotted Java-style package name.
    """
    try:
        result = self.adb_shell(command="dumpsys", args="window windows")
        lines = result.split('\n')
        for line in lines:
            if 'mCurrentFocus' in line or 'mFocusedApp' in line:
                # Dotted package name terminated by '/'.
                matches = re.search(r'(([A-Za-z]{1}[A-Za-z\d_]*\.)+([A-Za-z][A-Za-z\d_]*)/)', line)
                if matches:
                    return matches.group(1)[:-1]  # removing trailing slash
        return None
    except KeyError as e:
        # Log the error when an exception occurred.
        self.logger.error("appium_extended_terminal.get_current_app_package()")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return None
@log_debug()
def close_app(self, package: str) -> bool:
    """Force-stop the given package via `am force-stop`.

    Args:
        package: Application package to stop.

    Returns:
        True if the stop command was issued, False on error.
    """
    try:
        self.adb_shell(command="am", args=f"force-stop {package}")
    except KeyError as error:
        self.logger.error("appium_extended_terminal.close_app()")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def reboot_app(self, package: str, activity: str) -> bool:
    """Restart an app: force-stop it, then launch the given activity.

    Args:
        package: Application package name.
        activity: Activity to launch after the stop.

    Returns:
        True when both steps succeeded, False otherwise.
    """
    # Short-circuits: the activity is not started when the stop failed.
    return self.close_app(package=package) and \
        self.start_activity(package=package, activity=activity)
@log_debug()
def install_app(self, app_path: str) -> bool:
    """Install an application via Appium.

    Mirrors the driver's own method; added for discoverability.

    Args:
        app_path: Path to the APK to install. (The original docstring
            wrongly described a ``package`` argument and "removal".)

    Returns:
        True if the app was installed successfully, False otherwise.
    """
    try:
        self.driver.install_app(app_path=app_path)
        return True
    except KeyError as e:
        self.logger.error("appium_extended_terminal.install_app()")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
@log_debug()
def is_app_installed(self, package) -> bool:
    """Return True when ``package`` appears in `pm list packages`."""
    self.logger.debug(f"is_app_installed() < {package=}")
    try:
        listing = self.adb_shell(command="pm", args="list packages")
        # Each line looks like "package:<name>"; match on the suffix.
        found = any(entry.strip().endswith(package) for entry in listing.splitlines())
        if found:
            self.logger.debug("is_app_installed() > True")
            return True
        self.logger.debug("is_app_installed() > False")
        return False
    except KeyError as e:
        self.logger.error("appium_extended_terminal.is_app_installed() > False")
        self.logger.error(e)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
@log_debug()
def uninstall_app(self, package: str) -> bool:
    """Remove the given application via the Appium driver.

    Args:
        package: Application package to uninstall.

    Returns:
        True if the app was removed successfully, False otherwise.
    """
    try:
        self.driver.remove_app(app_id=package)
        return True
    except KeyError as e:
        self.logger.error("appium_extended_terminal.uninstall_app()")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
@log_debug()
def press_home(self) -> bool:
    """Send the HOME key event to the device.

    Returns:
        True if the key event was issued, False on error.
    """
    try:
        self.input_keycode(keycode="KEYCODE_HOME")
    except KeyError as error:
        self.logger.error("appium_extended_terminal.press_home()")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def press_back(self) -> bool:
    """Send the BACK key event to the device.

    Returns:
        True if the key event was issued, False on error.
    """
    try:
        self.input_keycode(keycode="KEYCODE_BACK")
    except KeyError as error:
        self.logger.error("appium_extended_terminal.press_back()")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def press_menu(self) -> bool:
    """Send the MENU key event to the device.

    Returns:
        True if the key event was issued, False on error.
    """
    try:
        self.input_keycode(keycode="KEYCODE_MENU")
    except KeyError as error:
        self.logger.error("appium_extended_terminal.press_menu()")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def input_keycode_num_(self, num: int) -> bool:
    """Send a numpad key event (KEYCODE_NUMPAD_<num>) to the device.

    Accepted suffixes: 0-9, ADD, COMMA, DIVIDE, DOT, ENTER, EQUALS.

    Args:
        num: Numeric key value to press.

    Returns:
        True if the command was issued successfully, False otherwise.
    """
    try:
        self.adb_shell(command="input", args=f"keyevent KEYCODE_NUMPAD_{num}")
        return True
    except KeyError as e:
        self.logger.error("appium_extended_terminal.input_keycode_num_()")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
@log_debug()
def input_keycode(self, keycode: str) -> bool:
    """Send an arbitrary key event (e.g. "KEYCODE_HOME") to the device.

    Args:
        keycode: Key code name understood by `input keyevent`.

    Returns:
        True if the command was issued successfully, False otherwise.
    """
    try:
        self.adb_shell(command="input", args=f"keyevent {keycode}")
        return True
    except KeyError as e:
        self.logger.error("appium_extended_terminal.input_keycode()")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
@log_debug()
def input_by_virtual_keyboard(self, key: str, keyboard: Dict[str, tuple]) -> bool:
    """Type a string by tapping an on-screen keyboard layout.

    Args:
        key: Characters to type.
        keyboard: Mapping from character to its (x, y) tap coordinates.

    Returns:
        True when every character was tapped, False when a character is
        missing from the layout (KeyError).
    """
    try:
        for symbol in key:
            coords = keyboard[str(symbol)]
            # Tap the position registered for this character.
            self.tap(x=coords[0], y=coords[1])
    except KeyError as error:
        self.logger.error("appium_extended_terminal.input_by_virtual_keyboard")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def input_text(self, text: str) -> bool:
    """Type ``text`` on the device via `input text`.

    NOTE(review): the text is passed unescaped; strings containing spaces
    or shell metacharacters are likely mangled by `input text` - confirm
    and escape at the call site if needed.

    Args:
        text: Text to type.

    Returns:
        True if the command was issued successfully, False otherwise.
    """
    try:
        self.adb_shell(command="input", args=f"text {text}")
        return True
    except KeyError as e:
        # Log the error when an exception occurred.
        self.logger.error("appium_extended_terminal.input_text()")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
@log_debug()
def tap(self, x: int, y: int) -> bool:
    """Tap the screen at (x, y) via `input tap`.

    Args:
        x: X coordinate of the tap.
        y: Y coordinate of the tap.

    Returns:
        True if the command was issued successfully, False otherwise.
    """
    try:
        self.adb_shell(command="input", args=f"tap {str(x)} {str(y)}")
    except KeyError as error:
        self.logger.error("appium_extended_terminal.tap()")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def swipe(self, start_x: Union[str, int], start_y: Union[str, int],
          end_x: Union[str, int], end_y: Union[str, int], duration: int = 300) -> bool:
    """Perform a swipe gesture between two points via `input swipe`.

    Args:
        start_x: X coordinate of the start point.
        start_y: Y coordinate of the start point.
        end_x: X coordinate of the end point.
        end_y: Y coordinate of the end point.
        duration: Swipe duration in milliseconds (default 300).

    Returns:
        True if the command was issued successfully, False otherwise.
    """
    try:
        self.adb_shell(command="input",
                       args=f"swipe {str(start_x)} {str(start_y)} {str(end_x)} {str(end_y)} {str(duration)}")
        return True
    except KeyError as e:
        # Log the error when an exception occurred.
        self.logger.error("appium_extended_terminal.swipe()")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
@log_debug()
def swipe_right_to_left(self, duration: int = 300) -> bool:
    """Horizontal swipe from 90% to 10% of screen width along the vertical center."""
    size = self.get_screen_resolution()
    x_from = int(size[0] * 0.9)
    x_to = int(size[0] * 0.1)
    y_line = size[1] // 2
    return self.swipe(start_x=x_from, start_y=y_line,
                      end_x=x_to, end_y=y_line, duration=duration)
@log_debug()
def swipe_left_to_right(self, duration: int = 300) -> bool:
    """Horizontal swipe from 10% to 90% of screen width along the vertical center."""
    size = self.get_screen_resolution()
    x_from = int(size[0] * 0.1)
    x_to = int(size[0] * 0.9)
    y_line = size[1] // 2
    return self.swipe(start_x=x_from, start_y=y_line,
                      end_x=x_to, end_y=y_line, duration=duration)
@log_debug()
def swipe_top_to_bottom(self, duration: int = 300) -> bool:
    """Vertical swipe from 10% to 90% of screen height along the horizontal center.

    Args:
        duration: Swipe duration in milliseconds.

    Returns:
        True if the underlying input command was issued, False otherwise.
    """
    window_size = self.get_screen_resolution()
    width = window_size[0]
    height = window_size[1]
    top = int(height * 0.1)
    bottom = int(height * 0.9)
    # BUG FIX: the previous code passed top/bottom as X coordinates with a
    # constant Y, which produced a horizontal swipe instead of a vertical one.
    return self.swipe(start_x=width // 2,
                      start_y=top,
                      end_x=width // 2,
                      end_y=bottom,
                      duration=duration)
@log_debug()
def swipe_bottom_to_top(self, duration: int = 300) -> bool:
    """Vertical swipe from 90% to 10% of screen height along the horizontal center.

    Args:
        duration: Swipe duration in milliseconds.

    Returns:
        True if the underlying input command was issued, False otherwise.
    """
    window_size = self.get_screen_resolution()
    width = window_size[0]
    height = window_size[1]
    top = int(height * 0.1)
    bottom = int(height * 0.9)
    # BUG FIX: the previous code passed top/bottom as X coordinates with a
    # constant Y, which produced a horizontal swipe instead of a vertical one.
    return self.swipe(start_x=width // 2,
                      start_y=bottom,
                      end_x=width // 2,
                      end_y=top,
                      duration=duration)
@log_debug()
def check_vpn(self, ip_address: str = '') -> bool:
    """Return True when netstat shows an ESTABLISHED connection to ``ip_address``.

    Args:
        ip_address: IP address to look for in the netstat output.

    Returns:
        True when a matching ESTABLISHED line exists, False otherwise.
    """
    try:
        output = self.adb_shell(command="netstat", args="")
        matched = any(ip_address in row and "ESTABLISHED" in row
                      for row in output.split('\n'))
        if matched:
            self.logger.debug("check_VPN() True")
            return True
        self.logger.debug("check_VPN() False")
        return False
    except KeyError as error:
        self.logger.error("appium_extended_terminal.check_VPN")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
@log_debug()
def stop_logcat(self) -> bool:
    """Stop running `logcat` processes on the device.

    Lists processes via `ps`, then sends SIGINT to every line that
    mentions "logcat" (PID taken from the second column).

    Returns:
        True if all matching processes were signalled, False on error.
    """
    # Fetch the list of running processes.
    try:
        process_list = self.adb_shell(command="ps", args="")
    except KeyError as e:
        self.logger.error("appium_extended_terminal.stop_logcat")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
    # Walk the listing and send SIGINT to every logcat process.
    for process in process_list.splitlines():
        if "logcat" in process:
            pid = process.split()[1]
            try:
                self.adb_shell(command="kill", args=f"-SIGINT {str(pid)}")
            except KeyError as e:
                self.logger.error("appium_extended_terminal.stop_logcat")
                self.logger.error(e)
                traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
                self.logger.error(traceback_info)
                return False
    return True
@log_debug()
def know_pid(self, name: str) -> Union[int, None]:
    """Find the PID of the process named ``name`` using `adb shell ps`.

    Args:
        name: Exact process name to look for.

    Returns:
        The PID as int when found, otherwise None.
    """
    processes = self.adb_shell(command="ps")
    # Cheap pre-check before parsing the whole listing.
    if name not in processes:
        self.logger.error("know_pid() [Процесс не обнаружен]")
        return None
    # Skip the header row; PID is column 2, process name is column 9.
    for row in processes.strip().split('\n')[1:]:
        fields = row.split()
        if len(fields) < 9:
            continue
        pid, proc_name = fields[1], fields[8]
        if proc_name == name:
            self.logger.debug(f"know_pid() > {str(pid)}")
            return int(pid)
    self.logger.error("know_pid() [Процесс не обнаружен]")
    return None
@log_debug()
def is_process_exist(self, name) -> bool:
    """Return True when a process named exactly ``name`` shows up in `ps`.

    Args:
        name: Process name to look for.

    Returns:
        True if the process exists, False otherwise.
    """
    processes = self.adb_shell(command="ps")
    # Cheap pre-check before parsing the whole listing.
    if name not in processes:
        self.logger.debug("is_process_exist() > False")
        return False
    # Skip the header row; the process name is the 9th column.
    for row in processes.strip().split('\n')[1:]:
        fields = row.split()
        if len(fields) >= 9 and fields[8] == name:
            self.logger.debug("is_process_exist() > True")
            return True
    self.logger.debug("is_process_exist() > False")
    return False
@log_debug()
def run_background_process(self, command: str, args: str = "", process: str = "") -> bool:
    """Start a process in the background on the Android device.

    Args:
        command: Command to execute on the device.
        args: Argument string appended before the backgrounding suffix.
        process: Process name to verify after launch; when "" the launch
            is not verified.

    Returns:
        True if launched (and, when requested, found running), False otherwise.
    """
    self.logger.debug(f"run_background_process() < {command=}")
    try:
        # NOTE(review): `nohup` is appended after the arguments here, while it
        # normally has to prefix the command - confirm this behaves as
        # intended on the target device's shell.
        self.adb_shell(command=command, args=args + " nohup > /dev/null 2>&1 &")
        if process != "":
            # Give the process a moment to appear before checking.
            time.sleep(1)
            if not self.is_process_exist(name=process):
                return False
        return True
    except KeyError as e:
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
@log_debug()
def kill_by_pid(self, pid: int) -> bool:
    """Send SIGINT to the process with the given PID.

    Args:
        pid: Target process id.

    Returns:
        True if the kill command was issued, False on error.
    """
    try:
        self.adb_shell(command="kill", args=f"-s SIGINT {str(pid)}")
    except KeyError as error:
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def kill_by_name(self, name: str) -> bool:
    """Send SIGINT to every process named ``name`` via `pkill`.

    Args:
        name: Exact process name to stop.

    Returns:
        True if the command was issued, False on error.
    """
    self.logger.debug(f"kill_by_name() < {name=}")
    try:
        # NOTE(review): `-l SIGINT` matches toybox pkill's signal syntax;
        # on procps pkill `-l` merely lists signals - confirm the target
        # devices use toybox.
        self.adb_shell(command="pkill", args=f"-l SIGINT {str(name)}")
    except KeyError as e:
        self.logger.error("kill_by_name() > False")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
    self.logger.debug("kill_by_name() > True")
    return True
@log_debug()
def kill_all(self, name: str) -> bool:
    """Stop all processes whose full command line matches ``name`` (pkill -f).

    Args:
        name: Process name or pattern to stop.

    Returns:
        True if the command was issued, False on error.
    """
    try:
        self.adb_shell(command="pkill", args=f"-f {str(name)}")
    except KeyError as error:
        self.logger.error("appium_extended_terminal.kill_all")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def delete_files_from_internal_storage(self, path) -> bool:
    """Delete all files inside ``path`` on the device.

    Args:
        path: Directory on the device whose contents are removed.

    Returns:
        True if the delete command was issued, False otherwise.
    """
    try:
        # BUG FIX: without a trailing slash, `rm -rf {path}*` also matched
        # sibling entries sharing the same prefix (e.g. /sdcard/data*
        # deleting /sdcard/database). Normalize to "<dir>/*".
        if not path.endswith('/'):
            path = path + '/'
        self.adb_shell(command="rm", args=f"-rf {path}*")
    except KeyError as e:
        self.logger.error("appium_extended_terminal.delete_files_from_internal_storage")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
    return True
@log_debug()
def delete_file_from_internal_storage(self, path: str, filename: str) -> bool:
    """Delete a single file from the device's internal storage.

    Args:
        path: Directory containing the file.
        filename: Name of the file to delete.

    Returns:
        True if the delete command was issued, False otherwise.
    """
    try:
        if path.endswith('/'):
            path = path[:-1]
        # BUG FIX: the `filename` parameter was never used - the command
        # contained a hard-coded literal instead of the interpolated name.
        self.adb_shell(command="rm", args=f"-rf {path}/{filename}")
    except KeyError as e:
        self.logger.error("appium_extended_terminal.delete_file_from_internal_storage")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
    return True
@log_debug()
def record_video(self, **options: Any) -> bool:
    """Start screen recording (3 minutes maximum).

    Args:
        **options: Options forwarded to ``driver.start_recording_screen``.

    Returns:
        True if recording started, False otherwise.
    """
    try:
        self.driver.start_recording_screen(**options)
    except KeyError as e:
        self.logger.error("appium_extended_terminal.record_video")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return False
    return True
@log_debug()
def stop_video(self, **options: Any) -> Union[bytes, None]:
    """Stop screen recording and return the decoded video bytes.

    Returns:
        The recorded video as raw bytes, or None if stopping failed.
        (The original docstring wrongly described a bool return.)
    """
    try:
        str_based64_video = self.driver.stop_recording_screen(**options)
        # Decode the base64-encoded payload into binary video data.
        return base64.b64decode(str_based64_video)
    except KeyError as e:
        self.logger.error("appium_extended_terminal.stop_video")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return None
@log_debug()
def reboot(self) -> bool:
    """Reboot the device via `adb shell reboot`.

    Returns:
        True if the reboot command was issued, False otherwise.
    """
    try:
        self.adb_shell(command='reboot')
    except KeyError as error:
        self.logger.error("appium_extended_terminal.reboot")
        self.logger.error(error)
        self.logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    return True
@log_debug()
def get_screen_resolution(self) -> Union[Tuple[int, int], None]:
    """Return (width, height) of the device screen in pixels via `wm size`.

    Returns:
        A (width, height) tuple, or None when the resolution could not
        be determined.
    """
    try:
        output = self.adb_shell(command='wm', args='size')
        # BUG FIX: adb_shell returns None when the underlying call failed;
        # the `in` check previously crashed with a TypeError in that case.
        if output and "Physical size" in output:
            resolution_str = output.split(":")[1].strip()
            width, height = resolution_str.split("x")
            return int(width), int(height)
        return None
    except KeyError as e:
        self.logger.error("appium_extended_terminal.get_screen_resolution")
        self.logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        self.logger.error(traceback_info)
        return None
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_terminal/terminal.py
|
terminal.py
|
import logging
import os
import re
import subprocess
import sys
import time
import traceback
from typing import Dict, Union, Tuple, Optional, Any
# sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(
# __file__)))) # The sys.path.append line adds the parent directory of the tests directory to the Python module search path, allowing you to import modules from the root folder.
from appium_extended_utils import operations
logger = logging.getLogger(__name__)
class Adb:
    """Static helpers driving a connected Android device through the `adb` binary."""

    @staticmethod
    def get_device_uuid() -> Union[str, None]:
        """Return the serial/UUID of the first connected device, or None.

        Parses `adb devices` output; matches either "ip:port" endpoints
        or plain numeric serials.
        """
        logger.debug("get_device_uuid()")
        # adb command that lists attached devices.
        command = ['adb', 'devices']
        try:
            # Run the command and capture its output.
            response = str(subprocess.check_output(command))
            # Extract device identifiers with a regex.
            device_list = re.findall(r'(\d+\.\d+\.\d+\.\d+:\d+|\d+)', response)
            try:
                # Return the first device found.
                logger.debug(f"get_device_uuid() > {device_list[0]}")
                return device_list[0]
            except IndexError:
                # No devices attached.
                logger.error("get_device_uuid() > None")
                logger.error("Нет подключенных устройств")
                return None
        except subprocess.CalledProcessError as e:
            logger.error("get_device_uuid() > None")
            logger.error(e)
            traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
            logger.error(traceback_info)
            return None
@staticmethod
def get_device_model() -> Optional[str]:
    """Return the model string of the connected Android device, or None on failure."""
    logger.debug("get_device_model()")
    cmd = ["adb", "shell", "getprop", "ro.product.model"]
    try:
        raw = subprocess.check_output(cmd)
    except subprocess.CalledProcessError as error:
        logger.error("get_device_model() > None")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return None
    # Decode and strip whitespace/newlines from the raw output.
    return raw.decode().strip()
@staticmethod
def push(source: str, destination: str) -> bool:
    """Copy a local file or directory to the device via `adb push`.

    Args:
        source: Local path to copy; must exist.
        destination: Target path on the device.

    Returns:
        True on success, False when the source is missing or adb failed.
    """
    logger.debug(f"push() < {source=}, {destination=}")
    if not os.path.exists(source):
        logger.error(f"Путь к копируемому файлу или директории не существует: {source=}")
        return False
    try:
        subprocess.run(["adb", "push", source, destination], check=True)
    except subprocess.CalledProcessError as error:
        logger.error("push() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("push() > True")
    return True
@staticmethod
def pull(source: str, destination: str) -> bool:
    """Copy a file or directory from the device via `adb pull`.

    Args:
        source: Path on the device.
        destination: Local path to write to.

    Returns:
        True on success, False otherwise.
    """
    logger.debug(f"pull() < {source=}, {destination=}")
    try:
        subprocess.run(["adb", "pull", source, destination], check=True)
    except subprocess.CalledProcessError as error:
        logger.error("pull() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("pull() > True")
    return True
@staticmethod
def install_app(source: str) -> bool:
    """Install an APK on the connected device via `adb install -r`.

    Args:
        source: Path to the APK file.

    Returns:
        True if the APK installed successfully, False otherwise.
    """
    logger.debug(f"install() < {source=}")
    # -r reinstalls the app, keeping its data.
    command = ["adb", "install", "-r", source]
    try:
        subprocess.run(command, check=True)
        logger.debug("install() > True")
        return True
    except subprocess.CalledProcessError as e:
        logger.error("install() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
@staticmethod
def is_app_installed(package) -> bool:
    """Check whether ``package`` is installed on the device.

    Args:
        package: Application package name.

    Returns:
        True if the package appears in `pm list packages`, False otherwise.
    """
    logger.debug(f"is_installed() < {package=}")
    command = "adb shell pm list packages"
    try:
        result = subprocess.check_output(command, shell=True).decode().strip()
        # Each line looks like "package:<name>"; match on the suffix.
        if any(line.strip().endswith(package) for line in result.splitlines()):
            logger.debug("is_installed() > True")
            return True
        logger.debug("is_installed() > False")
        return False
    except subprocess.CalledProcessError as e:
        # CONSISTENCY FIX: log labels previously read "install()" (copy-paste
        # from install_app), which misattributed these messages.
        logger.error("is_installed() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
@staticmethod
def uninstall_app(package: str) -> bool:
    """Uninstall ``package`` from the device via `adb uninstall`.

    Args:
        package: Application package to remove.

    Returns:
        True if the app was removed, False otherwise.
    """
    logger.debug(f"uninstall_app() < {package=}")
    try:
        subprocess.run(['adb', 'uninstall', package], check=True)
    except subprocess.CalledProcessError as error:
        logger.error("uninstall_app() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("uninstall_app() > True")
    return True
@staticmethod
def start_activity(package: str, activity: str) -> bool:
    """Start ``package``/``activity`` on the device via `am start -n`.

    Args:
        package: Application package name.
        activity: Activity class to launch.

    Returns:
        True if the activity was started, False otherwise.
    """
    logger.debug(f"start_activity() < {package=}, {activity=}")
    command = ['adb', 'shell', 'am', 'start', '-n', f'{package}/{activity}']
    try:
        subprocess.check_output(command)
        logger.debug("start_activity() > True")
        return True
    except subprocess.CalledProcessError as e:
        logger.error("start_activity() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
@staticmethod
def get_current_activity() -> Union[str, None]:
    """Return the name of the currently focused activity, or None on failure.

    Runs `adb shell dumpsys window windows` and greps the
    mCurrentFocus / mFocusedApp lines for "package/activity".
    """
    logger.debug("get_current_activity()")
    command = ['adb', 'shell', 'dumpsys', 'window', 'windows']
    try:
        # BUG FIX: `shell=True` combined with a list argument executed only
        # `adb` on POSIX and silently dropped the dumpsys arguments; run the
        # argument list directly instead.
        result = subprocess.check_output(command).decode().strip()
        # Lines of interest in the dumpsys output.
        pattern = r'mCurrentFocus|mFocusedApp'
        matched_lines = operations.grep_pattern(input_string=result, pattern=pattern)
        if matched_lines:
            for line in matched_lines:
                # The activity name follows the '/' of "package/activity".
                match = re.search(r'\/([^\/}]*)', line)
                if match:
                    activity_name = match.group(1)
                    logger.debug(f"get_current_activity() > {activity_name}")
                    return activity_name
        logger.error("get_current_activity() > None")
        return None
    except subprocess.CalledProcessError as e:
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        logger.error("get_current_activity() > None")
        return None
@staticmethod
def get_current_package() -> Union[str, None]:
    """
    Returns the package name of the currently focused app on the device via ADB.

    Returns:
        Union[str, None]: Package name of the focused app, or None on failure.
    """
    # BUGFIX: log prefixes said "get_current_app_package()", which does not
    # match this method's name; normalized for grep-ability.
    logger.debug("get_current_package()")
    command = ['adb', 'shell', 'dumpsys', 'window', 'windows']
    try:
        # BUGFIX: shell=True with a list argument executed only 'adb' on
        # POSIX; pass the argument list directly.
        result = subprocess.check_output(command).decode().strip()
        # Lines naming the focused window/app carry the package/activity pair.
        pattern = r'mCurrentFocus|mFocusedApp'
        matched_lines = operations.grep_pattern(input_string=result, pattern=pattern)
        if matched_lines:
            for line in matched_lines:
                # Package name sits between the 'u0 ' user prefix and the '/'.
                match = re.search(r'u0\s(.+?)/', line)
                if match:
                    package_name = match.group(1)
                    logger.debug(f"get_current_package() > {package_name}")
                    return package_name
        logger.error("get_current_package() > None")
        return None
    except subprocess.CalledProcessError as e:
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        logger.error("get_current_package() > None")
        return None
@staticmethod
def close_app(package: str) -> bool:
    """
    Force-stops the given package on the device via ADB.

    Args:
        package (str): Package name of the app to close.

    Returns:
        bool: True if the app was closed successfully, False otherwise.
    """
    logger.debug(f"close_app() < {package=}")
    stop_cmd = ['adb', 'shell', 'am', 'force-stop', package]
    try:
        subprocess.run(stop_cmd, check=True)
    except subprocess.CalledProcessError as error:
        logger.error("close_app() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("close_app() > True")
    return True
@staticmethod
def reboot_app(package: str, activity: str) -> bool:
    """
    Restarts an app: force-stops it, then launches the given activity.

    Args:
        package (str): Application package name.
        activity (str): Activity to launch after the stop.

    Returns:
        bool: True if the restart completed, False otherwise.
    """
    logger.debug(f"reboot_app() < {package=}, {activity=}")
    # Close first, then relaunch; abort on the first failing step.
    closed = Adb.close_app(package=package)
    if not closed:
        logger.error("reboot_app() > False")
        return False
    started = Adb.start_activity(package=package, activity=activity)
    if not started:
        logger.error("reboot_app() > False")
        return False
    logger.debug("reboot_app() > True")
    return True
@staticmethod
def press_home() -> bool:
    """
    Sends a Home key event to the device via ADB.

    Returns:
        bool: True if the command succeeded, False otherwise.
    """
    logger.debug("press_home()")
    keyevent_cmd = ['adb', 'shell', 'input', 'keyevent', 'KEYCODE_HOME']
    try:
        subprocess.run(keyevent_cmd, check=True)
    except subprocess.CalledProcessError as error:
        logger.error("press_home() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("press_home() > True")
    return True
@staticmethod
def press_back() -> bool:
    """
    Sends a Back key event to the device via ADB.

    Returns:
        bool: True if the command succeeded, False otherwise.
    """
    logger.debug("press_back()")
    keyevent_cmd = ['adb', 'shell', 'input', 'keyevent', 'KEYCODE_BACK']
    try:
        subprocess.run(keyevent_cmd, check=True)
    except subprocess.CalledProcessError as error:
        logger.error("press_back() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("press_back() > True")
    return True
@staticmethod
def press_menu() -> bool:
    """
    Sends a Menu key event to the device via ADB.

    Returns:
        bool: True if the command succeeded, False otherwise.
    """
    logger.debug("press_menu()")
    command = ['adb', 'shell', 'input', 'keyevent', 'KEYCODE_MENU']
    try:
        subprocess.run(command, check=True)
        logger.debug("press_menu() > True")
        return True
    except subprocess.CalledProcessError as e:
        # BUGFIX: the failure log read "adb.press_menu() > False", unlike
        # every sibling method; normalized to the consistent prefix.
        logger.error("press_menu() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
@staticmethod
def input_keycode_num_(num: int) -> bool:
    """
    Sends a numpad key event for the given value via ADB.
    Accepted values: 0-9, ADD, COMMA, DIVIDE, DOT, ENTER, EQUALS.

    Args:
        num (int): Numeric key value to press.

    Returns:
        bool: True if the command succeeded, False otherwise.
    """
    logger.debug(f"input_keycode_num_() < {num=}")
    keyevent_cmd = ['adb', 'shell', 'input', 'keyevent', f'KEYCODE_NUMPAD_{num}']
    try:
        subprocess.run(keyevent_cmd, check=True)
    except subprocess.CalledProcessError as error:
        logger.error("input_keycode_num_() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("input_keycode_num_() > True")
    return True
@staticmethod
def input_keycode(keycode: str) -> bool:
    """
    Sends an arbitrary key event to the device via ADB.

    Args:
        keycode (str): Keycode name to send (e.g. 'KEYCODE_ENTER').

    Returns:
        bool: True if the command succeeded, False otherwise.
    """
    logger.debug(f"input_keycode() < {keycode=}")
    keyevent_cmd = ['adb', 'shell', 'input', 'keyevent', f'{keycode}']
    try:
        subprocess.run(keyevent_cmd, check=True)
    except subprocess.CalledProcessError as error:
        logger.error("input_keycode() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("input_keycode() > True")
    return True
@staticmethod
def input_by_virtual_keyboard(text: str, keyboard: Dict[str, tuple]) -> bool:
    """
    Types a string by tapping keys of an on-screen (virtual) keyboard.

    Args:
        text (str): Characters to type.
        keyboard (dict): Mapping from character to (x, y) tap coordinates.

    Returns:
        bool: True if all characters were tapped, False otherwise.
    """
    logger.debug(f"input_by_virtual_keyboard() < {text=}, {keyboard=}")
    try:
        for char in text:
            # Tap the coordinates mapped to this character.
            Adb.tap(*keyboard[char])
    except KeyError as e:
        # BUGFIX: a character missing from the mapping used to escape as an
        # uncaught KeyError; report it as a normal failure instead.
        logger.error("input_by_virtual_keyboard() > False")
        logger.error(f"Character not present in keyboard mapping: {e}")
        return False
    except subprocess.CalledProcessError as e:
        logger.error("input_by_virtual_keyboard() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
    logger.debug("input_by_virtual_keyboard() > True")
    return True
@staticmethod
def input_text(text: str) -> bool:
    """
    Types the given text on the device via ADB.

    Args:
        text (str): Text to type.

    Returns:
        bool: True if the command succeeded, False otherwise.
    """
    logger.debug(f"input_text() < {text=}")
    input_cmd = ['adb', 'shell', 'input', 'text', text]
    try:
        subprocess.run(input_cmd, check=True)
    except subprocess.CalledProcessError as error:
        logger.error("input_text() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("input_text() > True")
    return True
@staticmethod
def tap(x: Union[str, int], y: Union[str, int]) -> bool:
    """
    Taps the screen at the given coordinates via ADB.

    Args:
        x: X coordinate of the tap.
        y: Y coordinate of the tap.

    Returns:
        bool: True if the command succeeded, False otherwise.
    """
    logger.debug(f"tap() < {x=}, {y=}")
    tap_cmd = ['adb', 'shell', 'input', 'tap', str(x), str(y)]
    try:
        subprocess.run(tap_cmd, check=True)
    except subprocess.CalledProcessError as error:
        logger.error("tap() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("tap() > True")
    return True
@staticmethod
def swipe(start_x: Union[str, int], start_y: Union[str, int],
          end_x: Union[str, int], end_y: Union[str, int],
          duration: int = 300) -> bool:
    """
    Performs a swipe gesture between two points on the screen via ADB.

    Args:
        start_x: X coordinate of the swipe start.
        start_y: Y coordinate of the swipe start.
        end_x: X coordinate of the swipe end.
        end_y: Y coordinate of the swipe end.
        duration (int): Swipe duration in milliseconds (default 300).

    Returns:
        bool: True if the command succeeded, False otherwise.
    """
    logger.debug(f"swipe() < {start_x=}, {start_y=}, {end_x=}, {end_y=}, {duration=}")
    # Every positional value is stringified for the adb argument list.
    coords = (start_x, start_y, end_x, end_y, duration)
    swipe_cmd = ['adb', 'shell', 'input', 'swipe'] + [str(value) for value in coords]
    try:
        subprocess.run(swipe_cmd, check=True)
    except subprocess.CalledProcessError as error:
        logger.error("swipe() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("swipe() > True")
    return True
@staticmethod
def check_vpn(ip_address: str = '') -> bool:
    """
    Checks whether a VPN connection to the given address is established.

    Args:
        ip_address (str): IP address to look for among established connections.
            An empty string matches any established connection.

    Returns:
        bool: True if a matching ESTABLISHED connection exists, False otherwise.
    """
    logger.debug(f"check_vpn() < {ip_address=}")
    # Prefer an argument list over a shell string: no shell parsing involved.
    command = ['adb', 'shell', 'netstat']
    try:
        output = subprocess.run(command, capture_output=True, text=True, check=True)
        for line in output.stdout.split("\n"):
            # A live VPN shows up as an ESTABLISHED connection to the address.
            if "ESTABLISHED" in line and ip_address in line:
                # BUGFIX: result logs were missing the '>' separator used by
                # every other method ("check_vpn() True").
                logger.debug("check_vpn() > True")
                return True
        logger.debug("check_vpn() > False")
        return False
    except subprocess.CalledProcessError as e:
        logger.error("check_vpn() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
@staticmethod
def stop_logcat() -> bool:
    """
    Stops a running logcat process on the device via ADB.

    Returns:
        bool: True if logcat was running and was stopped, False otherwise.
    """
    logger.debug("stop_logcat()")
    if not Adb.is_process_exist(name='logcat'):
        # BUGFIX: previously the "no running logcat" message was also logged
        # when the process existed but killing it failed; it is now emitted
        # only on the no-process path.
        logger.debug("stop_logcat() [Запущенного процесса logcat не обнаружено]")
        logger.error("stop_logcat() > False")
        return False
    if Adb.kill_all(name='logcat'):
        logger.debug("stop_logcat() > True")
        return True
    logger.error("stop_logcat() > False")
    return False
@staticmethod
def is_process_exist(name) -> bool:
    """
    Checks whether a process with the given name is running, via `adb shell ps`.

    Args:
        name (str): Process name to look for.

    Returns:
        bool: True if a process with that name exists, False otherwise.
    """
    logger.debug(f"is_process_exist() < {name=}")
    command = ['adb', 'shell', 'ps']
    try:
        # BUGFIX: shell=True combined with a list argument executed only
        # 'adb' on POSIX; pass the argument list directly.
        processes = subprocess.check_output(command).decode().strip()
    except subprocess.CalledProcessError as e:
        # BUGFIX: the error log said "know_pid() > None" (copied from a
        # sibling method); corrected to this method's name and result.
        logger.error("is_process_exist() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
    # Skip the header row; in `ps` output the process name is the 9th column.
    lines = processes.strip().split('\n')
    for line in lines[1:]:
        columns = line.split()
        if len(columns) >= 9:
            _, process_name = columns[1], columns[8]
            if name == process_name:
                logger.debug("is_process_exist() > True")
                return True
    logger.debug("is_process_exist() > False")
    return False
@staticmethod
def run_background_process(command: str, process: str = "") -> bool:
    """
    Starts a shell command as a detached background process.

    Args:
        command (str): Shell command to execute.
        process (str): Process name to verify after starting.
            If empty, no verification is performed.

    Returns:
        bool: True if the process started (and was verified, when requested).
    """
    logger.debug(f"run_background_process() < {command=}")
    # BUGFIX: 'nohup' must precede the command it wraps; it was previously
    # appended after it, where the shell treated it as an argument.
    command = f"nohup {command} > /dev/null 2>&1 &"
    try:
        # BUGFIX: the command is a shell string (redirections, trailing '&'),
        # so it requires shell=True; Popen on the raw string raised
        # FileNotFoundError before.
        subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL)  # do not wrap in 'with'
        if process != "":
            # Give the process a moment to appear before verifying it.
            time.sleep(1)
            if not Adb.is_process_exist(name=process):
                return False
        logger.debug("run_background_process() > True")
        return True
    except (OSError, subprocess.SubprocessError) as e:
        logger.error("run_background_process() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
@staticmethod
def reload_adb() -> bool:
    """
    Restarts the adb server: kill-server, short pause, start-server.

    Returns:
        bool: True if both phases succeeded, False otherwise.
    """
    logger.debug("reload_adb()")
    for subcommand in ('kill-server', 'start-server'):
        try:
            subprocess.run(['adb', subcommand], check=True)
        except subprocess.CalledProcessError as error:
            logger.error("reload_adb() > False")
            logger.error(error)
            logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
            return False
        # Give the old server time to shut down before starting a new one.
        if subcommand == 'kill-server':
            time.sleep(3)
    logger.debug("reload_adb() > True")
    return True
@staticmethod
def know_pid(name: str) -> Union[int, None]:
    """
    Finds the PID of a process by its name using `adb shell ps`.

    Args:
        name (str): Name of the process whose PID is wanted.

    Returns:
        Union[int, None]: The PID if the process is found, otherwise None.
    """
    logger.debug(f"know_pid() < {name=}")
    command = ['adb', 'shell', 'ps']
    try:
        # BUGFIX: shell=True combined with a list argument executed only
        # 'adb' on POSIX; pass the argument list directly.
        processes = subprocess.check_output(command).decode().strip()
    except subprocess.CalledProcessError as e:
        logger.error("know_pid() > None")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return None
    # Skip the header row; in `ps` output PID is the 2nd column and the
    # process name the 9th.
    lines = processes.strip().split('\n')
    for line in lines[1:]:
        columns = line.split()
        if len(columns) >= 9:
            pid, process_name = columns[1], columns[8]
            if name == process_name:
                logger.debug(f"know_pid() > {pid=}")
                return int(pid)
    logger.error("know_pid() > None")
    logger.error("know_pid() [Процесс не обнаружен]")
    return None
@staticmethod
def kill_by_pid(pid: Union[str, int]) -> bool:
    """
    Sends SIGINT to the process with the given PID via ADB.

    Args:
        pid (Union[str, int]): PID of the process to stop.

    Returns:
        bool: True if the signal command succeeded, False otherwise.
    """
    logger.debug(f"kill_by_pid() < {pid=}")
    command = ['adb', 'shell', 'kill', '-s', 'SIGINT', str(pid)]
    try:
        # BUGFIX: subprocess.call() never raises CalledProcessError, so a
        # failed kill was silently reported as success; run(check=True)
        # makes failures reach the except branch as documented.
        subprocess.run(command, check=True)
    except subprocess.CalledProcessError as e:
        logger.error("kill_by_pid() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
    logger.debug("kill_by_pid() > True")
    return True
@staticmethod
def kill_by_name(name: str) -> bool:
    """
    Stops all processes with the given name on the device via ADB.

    Args:
        name (str): Name of the process(es) to stop.

    Returns:
        bool: True if the kill command ran, False otherwise.
    """
    logger.debug(f"kill_by_name() < {name=}")
    # NOTE(review): subprocess.call() does not raise CalledProcessError on a
    # non-zero exit, so the except branch is effectively unreachable; also
    # confirm that `pkill -l SIGINT` is the intended signal syntax on the
    # target device's pkill implementation.
    pkill_cmd = ['adb', 'shell', 'pkill', '-l', 'SIGINT', str(name)]
    try:
        subprocess.call(pkill_cmd)
    except subprocess.CalledProcessError as error:
        logger.error("kill_by_name() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("kill_by_name() > True")
    return True
@staticmethod
def kill_all(name: str) -> bool:
    """
    Stops every process matching the given name pattern via ADB (`pkill -f`).

    Args:
        name (str): Process name or pattern to stop.

    Returns:
        bool: True if the kill command succeeded, False otherwise.
    """
    logger.debug(f"kill_all() < {name=}")
    pkill_cmd = ['adb', 'shell', 'pkill', '-f', str(name)]
    try:
        subprocess.run(pkill_cmd, check=True)
    except subprocess.CalledProcessError as error:
        logger.error("kill_all() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("kill_all() > True")
    return True
@staticmethod
def delete_files_from_internal_storage(path: str) -> bool:
    """
    Deletes all files under the given directory on the device via ADB.

    Args:
        path (str): Device directory whose contents should be removed.

    Returns:
        bool: True if the deletion command succeeded, False otherwise.
    """
    logger.debug(f"delete_files_from_internal_storage() < {path=}")
    # BUGFIX: without a trailing slash the glob 'path*' also matched sibling
    # entries sharing the prefix (e.g. 'Movies2'); normalize first so the
    # wildcard only expands inside the directory.
    if not path.endswith('/'):
        path = path + '/'
    command = ['adb', 'shell', 'rm', '-rf', f'{path}*']
    try:
        subprocess.run(command, check=True)
    except subprocess.CalledProcessError as e:
        logger.error("delete_files_from_internal_storage() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
    logger.debug("delete_files_from_internal_storage() > True")
    return True
@staticmethod
def pull_video(source: str = None, destination: str = ".", delete: bool = True) -> bool:
    """
    Copies recorded videos from the device to the host via ADB.

    Args:
        source (str): Source directory on the device (defaults to '/sdcard/Movies/').
        destination (str): Destination directory on the host.
        delete (bool): Delete the source files from the device after copying
            (default True).

    Returns:
        bool: True if the videos were copied (and deleted, when requested).
    """
    logger.debug(f"pull_video() < {destination=}")
    if not source:
        source = '/sdcard/Movies/'
    # BUGFIX: both conditions were inverted — the trailing slash was doubled
    # when already present and never added when missing.
    if not source.endswith('/'):
        source = source + "/"
    if not destination.endswith('/'):
        destination = destination + "/"
    command = ['adb', 'pull', f'{source}', f'{destination}']
    try:
        subprocess.run(command, check=True)
    except subprocess.CalledProcessError as e:
        logger.error("pull_video() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
    if delete:
        # Remove the originals only after a successful pull.
        command = ['adb', 'shell', 'rm', '-rf', f'{source}*']
        try:
            subprocess.run(command, check=True)
        except subprocess.CalledProcessError as e:
            logger.error("pull_video() > False")
            logger.error(e)
            traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
            logger.error(traceback_info)
            return False
    logger.debug("pull_video() > True")
    return True
@staticmethod
def stop_video() -> bool:
    """
    Stops an ongoing screen recording on the device via ADB.

    Returns:
        bool: True if the stop command ran, False otherwise.
    """
    logger.debug("stop_video()")
    # NOTE(review): subprocess.call() does not raise CalledProcessError on a
    # non-zero exit, so the except branch is effectively unreachable —
    # confirm whether failures should be detected here.
    stop_cmd = ['adb', 'shell', 'pkill', '-l', 'SIGINT', 'screenrecord']
    try:
        subprocess.call(stop_cmd)
    except subprocess.CalledProcessError as error:
        logger.error("stop_video() > False")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
        return False
    logger.debug("stop_video() > True")
    return True
@staticmethod
def record_video(path: str = "sdcard/Movies/", filename: str = "screenrecord.mp4") -> \
        Union[subprocess.Popen[bytes], subprocess.Popen[Union[Union[str, bytes], Any]]]:
    """
    Starts screen recording on the device and returns the recorder process.

    Args:
        path (str): Device directory to save the file to.
        filename (str): Output video file name ('.mp4' appended when missing).

    Returns:
        subprocess.Popen: The running screenrecord process, or None on failure.
    """
    logger.debug(f"record_video() < {path=}, {filename=}")
    # Normalize so the remote target becomes '<path>/<filename>'.
    if path.endswith('/'):
        path = path[:-1]
    # BUGFIX: the condition was inverted — '.mp4' was appended only when the
    # name already ended with '.mp4'; append it only when missing.
    if not filename.endswith('.mp4'):
        filename = filename + ".mp4"
    command = ['adb', 'shell', 'screenrecord', f'{path}/{filename}']
    try:
        # Launch screenrecord without waiting; the caller owns the process.
        return subprocess.Popen(command)
    except subprocess.CalledProcessError as e:
        logger.error("record_video() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
@staticmethod
def start_record_video(path: str = "sdcard/Movies/", filename: str = "screenrecord.mp4") -> bool:
    """
    Sends a command to the device to start screen recording (non-blocking).

    Args:
        path (str): Device directory to save the file to.
        filename (str): Output video file name ('.mp4' appended when missing).

    Returns:
        bool: True if the recording was started, False otherwise.
    """
    # Normalize so the remote target becomes '<path>/<filename>'.
    if path.endswith('/'):
        path = path[:-1]
    if not filename.endswith('.mp4'):
        filename = filename + ".mp4"
    # BUGFIX: the output path previously lost the filename component.
    command = ['adb', 'shell', 'screenrecord', f'{path}/{filename}']
    try:
        # Launch screenrecord without waiting for it to finish.
        subprocess.Popen(command)  # do not wrap in 'with'
        return True
    except subprocess.CalledProcessError:
        return False
@staticmethod
def reboot() -> bool:
    """
    Reboots the device via ADB.

    Returns:
        bool: True if the reboot command was issued successfully.
    """
    logger.debug("reboot()")
    command = ['adb', 'shell', 'reboot']
    try:
        # BUGFIX: subprocess.call() never raises CalledProcessError, so the
        # failure branch could not trigger; run(check=True) restores it.
        subprocess.run(command, check=True)
    except subprocess.CalledProcessError as e:
        # BUGFIX: log prefix normalized from "reboot >" to "reboot() >".
        logger.error("reboot() > False")
        logger.error(e)
        traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
        logger.error(traceback_info)
        return False
    logger.debug("reboot() > True")
    return True
@staticmethod
def get_screen_resolution() -> Union[Tuple[int, int], None]:
    """
    Reads the device screen resolution via `adb shell wm size`.

    Returns:
        Union[Tuple[int, int], None]: (width, height) in pixels, or None on error.
    """
    logger.debug("get_screen_resolution()")
    size_cmd = ['adb', 'shell', 'wm', 'size']
    try:
        output = subprocess.check_output(size_cmd).decode()
        if "Physical size" in output:
            # Expected shape: 'Physical size: <width>x<height>'.
            width, height = output.split(":")[1].strip().split("x")
            logger.debug(f"get_screen_resolution() > {width=}, {height=}")
            return int(width), int(height)
        logger.error(f"Unexpected output from adb: {output}")
    except (subprocess.CalledProcessError, ValueError) as error:
        logger.error("get_screen_resolution() > None")
        logger.error(error)
        logger.error("".join(traceback.format_tb(sys.exc_info()[2])))
    return None
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_terminal/adb.py
|
adb.py
|
import logging
import os
import subprocess
logger = logging.getLogger(__name__)
class Aapt:
    """Thin wrapper around the `aapt dump badging` tool for APK inspection."""

    @staticmethod
    def get_package_name(path_to_apk: str) -> str:
        """
        Extracts the package name from an APK via `aapt dump badging`.

        Args:
            path_to_apk (str): Path to the APK file.

        Returns:
            str: The package name.

        Raises:
            subprocess.CalledProcessError: If aapt fails.
            ValueError: If no package name is present in the output.
        """
        logger.info(f"get_package_name() < {path_to_apk}")
        command = ["aapt", "dump", "badging", os.path.join(path_to_apk)]
        try:
            # BUGFIX: the output was obtained as str(bytes), i.e. the repr
            # "b'...'", and searched as such; decode it to real text instead.
            output: str = subprocess.check_output(command, universal_newlines=True).strip()
            # The badging output contains: package: name='<package>' ...
            start_index = output.index("package: name='") + len("package: name='")
            end_index = output.index("'", start_index)
            package_name = output[start_index:end_index]
        except subprocess.CalledProcessError as e:
            logger.error(f"Could not extract package name. Error: {str(e)}")
            raise  # propagate to the caller
        except ValueError:
            logger.error("Could not find package name in the output.")
            raise  # propagate to the caller
        logger.info(f"get_package_name() > {package_name}")
        return package_name

    @staticmethod
    def get_launchable_activity(path_to_apk: str) -> str:
        """
        Extracts the launchable activity name from an APK via `aapt dump badging`.

        Args:
            path_to_apk (str): Path to the APK file.

        Returns:
            str: The launchable activity name, or "" when it cannot be determined.
        """
        logger.info(f"get_launchable_activity_from_apk() < {path_to_apk}")
        command = ["aapt", "dump", "badging", path_to_apk]
        try:
            output = subprocess.check_output(command, universal_newlines=True).strip()
            # The relevant line looks like: launchable-activity: name='<activity>' ...
            package_line = next(line for line in output.splitlines() if line.startswith("launchable-activity"))
            launchable_activity = package_line.split("'")[1]
            logger.info(f"get_launchable_activity_from_apk() > {launchable_activity}")
            return launchable_activity
        except subprocess.CalledProcessError as e:
            logger.error(f"Could not extract launchable activity. Error: {str(e)}")
        except StopIteration:
            logger.error("Could not find 'launchable-activity' line in aapt output.")
        # Fall-through for every handled failure.
        return ""
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_terminal/aapt.py
|
aapt.py
|
import logging
from collections import deque
from typing import Any, Optional, List
from appium_extended.appium_extended import AppiumExtended
from appium_extended_graph.appium_graph import AppiumGraph
from appium_extended_helpers.appium_image import AppiumImage
class AppiumNavigator:
    """Graph-based navigation between application pages (screens/windows)."""

    def __init__(self, app, logger: logging.Logger):
        self.app: AppiumExtended = app
        self.driver = self.app.driver
        self.graph_manager = AppiumGraph(self)
        self.logger = logger
        self.image = AppiumImage(driver=app.driver, logger=logger)

    def add_page(self, page, edges):
        """
        Registers a vertex in the application navigation graph.
        A vertex represents a page (screen / window).
        """
        self.graph_manager.add_page(page=page, edges=edges)

    def navigate(self, current_page: Any, destination_page: Any, timeout: int = 55) -> None:
        """
        Navigates from the current page to the destination page.

        Finds a route through the page graph and executes the transitions
        one by one until the destination is reached.

        Args:
            current_page: Page class the user is currently on.
            destination_page: Page class to navigate to.
            timeout (int, optional): Maximum transition wait, default 55 s.

        Raises:
            ValueError: If no route exists between the two pages.
        """
        # Nothing to do when we are already at the destination.
        if current_page == destination_page:
            return
        route = self.find_path(current_page, destination_page)
        if not route:
            raise ValueError(f"No path found from {current_page} to {destination_page}")
        self.perform_navigation(route, timeout)

    def find_path(self, start_page: Any, target_page: Any) -> Optional[List[Any]]:
        """
        Finds a route from the start page to the target page.

        Uses breadth-first search over the page-transition graph, tracking
        the trail taken to each node and the set of visited pages.

        Args:
            start_page: Page to start the search from.
            target_page: Page that must be reached.

        Returns:
            Optional[List[Any]]: Pages forming the route (start to target
            inclusive), or None when no route exists.
        """
        seen = set()
        # Each queue entry is (page, trail-of-pages-leading-to-it).
        frontier = deque([(start_page, [])])
        while frontier:
            page, trail = frontier.popleft()
            seen.add(page)
            # Inspect every neighbour reachable from this page.
            for neighbour in self.graph_manager.get_edges(page=page):
                if neighbour == target_page:
                    # Found it — return the full route including both ends.
                    return trail + [page, neighbour]
                if neighbour not in seen:
                    frontier.append((neighbour, trail + [page]))
        return None

    def perform_navigation(self, path: List[Any], timeout: int = 55) -> None:
        """
        Executes the transitions along the given route.

        Args:
            path (List[Any]): Pages forming the route; consecutive elements
                define the transitions to perform, in order.

        Returns:
            None
        """
        # Walk consecutive page pairs and invoke the registered transition.
        for source, target in zip(path, path[1:]):
            try:
                transition = source.edges[target]
                transition()
            except KeyError as error:
                self.logger.error("perform_navigation() Не найден способ перехода")
                self.logger.exception(error)
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_navigator/appium_navigator.py
|
appium_navigator.py
|
import traceback
from typing import Optional, Union, Tuple, List, Dict
from appium.webdriver import WebElement
from appium.webdriver.common.appiumby import AppiumBy
from appium.webdriver.common.mobileby import MobileBy
from selenium.webdriver.common.by import By
class GetElementError(Exception):
    """Raised when an attempt to obtain a single element fails."""

    def __init__(self, message, locator=None, by=None, value=None,
                 timeout_elem=None, timeout_method=None, elements_range=None,
                 contains=None, original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Preserve the lookup parameters for post-mortem debugging.
        self.locator = locator
        self.by = by
        self.value = value
        self.timeout_elem = timeout_elem
        self.timeout_method = timeout_method
        self.elements_range = elements_range
        self.contains = contains
        # Capture the traceback active at construction time.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
class GetElementsError(Exception):
    """Raised when an attempt to obtain multiple elements fails."""

    def __init__(self,
                 message: str,
                 locator: Union[Tuple, List[WebElement], Dict[str, str], str] = None,
                 by: Union[MobileBy, AppiumBy, By, str] = None,
                 value: Union[str, Dict, None] = None,
                 timeout_elements: int = None,
                 timeout_method: int = None,
                 elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
                 contains: bool = None,
                 original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Preserve the lookup parameters for post-mortem debugging.
        self.locator = locator
        self.by = by
        self.value = value
        self.timeout_elements = timeout_elements
        self.timeout_method = timeout_method
        self.elements_range = elements_range
        self.contains = contains
        # Capture the traceback active at construction time.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
class GetImageCoordinatesError(Exception):
    """
    Raised when an attempt to locate an image fails.
    """

    def __init__(self,
                 message,
                 image=None,
                 full_image=None,
                 threshold=None,
                 original_exception: Optional[Exception] = None
                 ):
        super().__init__(message)
        # BUGFIX: trailing commas turned these assignments into 1-tuples
        # (self.image == (image,)); store the plain values instead.
        self.full_image = full_image
        self.image = image
        self.threshold = threshold
        # Capture the traceback active at construction time.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
class GetManyCoordinatesOfImageError(Exception):
    """
    Raised when an attempt to find every occurrence of a partial image
    inside a full image fails.
    """

    def __init__(self,
                 message: str,
                 image: Union[bytes, str] = None,
                 full_image: Union[bytes, str] = None,
                 cv_threshold: Optional[float] = None,
                 coord_threshold: Optional[int] = None,
                 original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed search, preserved verbatim.
        self.image = image
        self.full_image = full_image
        self.cv_threshold = cv_threshold
        self.coord_threshold = coord_threshold
class GetInnerImageCoordinatesError(Exception):
    """
    Raised when an attempt to locate an inner image inside another image fails.

    Args:
        message: Human-readable error description.
        outer_image_path: Outer image (or path) that was searched on screen.
        inner_image_path: Inner image (or path) searched inside the outer one.
        threshold: Matching threshold that was used.
        original_exception: Underlying exception, if any.
    """

    def __init__(self,
                 message,
                 outer_image_path=None,
                 inner_image_path=None,
                 threshold=None,
                 original_exception: Optional[Exception] = None
                 ):
        super().__init__(message)
        # Bug fix: the original assignments ended with stray trailing commas
        # ("self.outer_image_path = outer_image_path,"), silently storing
        # one-element tuples instead of the values themselves.
        self.outer_image_path = outer_image_path
        self.inner_image_path = inner_image_path
        self.threshold = threshold
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
class GetTextCoordinatesError(Exception):
    """
    Raised when an attempt to find the coordinates of a text fragment on an
    image or on the screen fails.
    """

    def __init__(self,
                 message: str,
                 text: str,
                 language: Optional[str] = None,
                 image: Union[bytes, str] = None,
                 ocr: Optional[bool] = None,
                 contains: bool = None,
                 original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed search, preserved verbatim.
        self.text = text
        self.language = language
        self.image = image
        self.ocr = ocr
        self.contains = contains
class FindAndGetElementError(Exception):
    """
    Raised when an attempt to find and retrieve an element fails.
    """

    def __init__(self,
                 message: str,
                 locator: Union[Tuple[str, str], 'WebElement', 'WebElementExtended', Dict[str, str], str],
                 timeout: int,
                 tries: int,
                 contains: bool,
                 original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed search, preserved verbatim.
        self.locator = locator
        self.timeout = timeout
        self.tries = tries
        self.contains = contains
class IsElementWithinScreenError(Exception):
    """
    Raised when checking whether an element lies within the visible screen
    fails.
    """

    def __init__(self,
                 message: str,
                 locator: Union[Tuple[str, str], 'WebElement', 'WebElementExtended', Dict[str, str], str],
                 timeout: int,
                 contains: bool,
                 original_exception: Exception):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed check, preserved verbatim.
        self.locator = locator
        self.timeout = timeout
        self.contains = contains
class IsTextOnScreenError(Exception):
    """
    Raised when checking whether the given text is present on the screen
    fails.
    """

    def __init__(self,
                 message: str,
                 text: str,
                 language: str,
                 ocr: bool,
                 contains: bool,
                 original_exception: Exception):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed check, preserved verbatim.
        self.text = text
        self.language = language
        self.ocr = ocr
        self.contains = contains
class IsImageOnScreenError(Exception):
    """
    Raised when checking whether the given image is present on the screen
    fails.
    """

    def __init__(self,
                 message: str,
                 image: Union[bytes, str],
                 threshold: float,
                 original_exception: Exception):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed check, preserved verbatim.
        self.image = image
        self.threshold = threshold
class TapError(Exception):
    """
    Raised when performing a tap fails.
    """

    def __init__(self,
                 message: str,
                 locator: Union[Tuple[str, str], 'WebElementExtended', 'WebElement', Dict[str, str], str] = None,
                 x: int = None,
                 y: int = None,
                 image: Union[bytes, str] = None,
                 duration: Optional[int] = None,
                 timeout: int = 5,
                 original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed tap, preserved verbatim.
        self.locator = locator
        self.x = x
        self.y = y
        self.image = image
        self.duration = duration
        self.timeout = timeout
class SwipeError(Exception):
    """
    Raised when a swipe cannot be performed.
    """

    def __init__(self, message: str, start_position, end_position, direction, distance, duration,
                 original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed swipe, preserved verbatim.
        self.start_position = start_position
        self.end_position = end_position
        self.direction = direction
        self.distance = distance
        self.duration = duration
class WaitForError(Exception):
    """
    Raised when an element or image does not appear on the screen within the
    given time.
    """

    def __init__(self, message: str, locator, image, timeout: int, contains: bool,
                 original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed wait, preserved verbatim.
        self.locator = locator
        self.image = image
        self.timeout = timeout
        self.contains = contains
class WaitForNotError(Exception):
    """
    Raised when an element or image does not disappear from the screen within
    the given time.
    """

    def __init__(self, message: str, locator, image, timeout: int, contains: bool,
                 original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed wait, preserved verbatim.
        self.locator = locator
        self.image = image
        self.timeout = timeout
        self.contains = contains
class WaitReturnTrueError(Exception):
    """
    Raised when a method does not return True within the given time.
    """

    def __init__(self, message: str, method, timeout: int, original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed wait, preserved verbatim.
        self.method = method
        self.timeout = timeout
class DrawByCoordinatesError(Exception):
    """
    Raised when a rectangle cannot be drawn on an image.
    """

    def __init__(self, message: str, coordinates: Tuple[int, int, int, int], top_left: Tuple[int, int],
                 bottom_right: Tuple[int, int], path: str, original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed drawing operation, preserved verbatim.
        self.coordinates = coordinates
        self.top_left = top_left
        self.bottom_right = bottom_right
        self.path = path
class ExtractPointCoordinatesByTypingError(Exception):
    """
    Raised when point coordinates cannot be extracted from the given
    position based on its type.
    """

    def __init__(self,
                 message: str,
                 position: Union[Tuple[int, int], str, bytes, 'np.ndarray', 'Image.Image',
                                 Tuple[str, str], Dict, WebElement, 'WebElementExtended'],
                 original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # The position whose coordinates could not be extracted.
        self.position = position
class ExtractPointCoordinatesError(Exception):
    """
    Raised when point coordinates cannot be extracted from the given
    parameters.
    """

    def __init__(self,
                 message: str,
                 direction: int,
                 distance: int,
                 start_x: int,
                 start_y: int,
                 screen_resolution: Tuple[int, int],
                 original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Parameters of the failed extraction, preserved verbatim.
        self.direction = direction
        self.distance = distance
        self.start_x = start_x
        self.start_y = start_y
        self.screen_resolution = screen_resolution
class GetScreenshotError(Exception):
    """
    Raised when a screenshot of the screen cannot be taken.
    """

    def __init__(self, message: str, original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
class SaveScreenshotError(Exception):
    """
    Raised when a screenshot cannot be saved.
    """

    def __init__(self, message: str, path: str, filename: str, original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Target location of the failed save, preserved verbatim.
        self.path = path
        self.filename = filename
class SaveSourceError(Exception):
    """
    Raised when the page source cannot be saved.
    """

    def __init__(self, message: str, path: str, filename: str, original_exception: Optional[Exception] = None):
        super().__init__(message)
        # Snapshot the active traceback and the underlying cause first.
        self.traceback = traceback.format_exc()
        self.original_exception = original_exception
        # Target location of the failed save, preserved verbatim.
        self.path = path
        self.filename = filename
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_exceptions/appium_extended_exceptions.py
|
appium_extended_exceptions.py
|
import base64
import logging
import os
from typing import Union, List, Tuple, Optional
import cv2
import numpy as np
from PIL import Image
from pytesseract import pytesseract
from selenium.common.exceptions import WebDriverException
from appium_extended_helpers import helpers_decorators
from appium_extended_terminal.terminal import Terminal
class AppiumImage:
    """
    Image helpers for an Appium driver.

    Wraps screenshot capture, OpenCV template matching and pytesseract OCR so
    callers can locate images and text on the device screen.
    """

    def __init__(self, driver, logger: logging.Logger):
        """
        Args:
            driver: Appium WebDriver used for screenshots.
            logger: Logger for diagnostics.
        """
        self.logger = logger
        self.driver = driver
        # Terminal helper, used e.g. to query the device screen resolution.
        self.terminal = Terminal(driver=self.driver, logger=logger)

    @helpers_decorators.retry
    def get_image_coordinates(self,
                              image: Union[bytes, np.ndarray, Image.Image, str],
                              full_image: Union[bytes, np.ndarray, Image.Image, str] = None,
                              threshold: Optional[float] = 0.7,
                              ) -> Union[Tuple[int, int, int, int], None]:
        """
        Finds the coordinates of the most likely match of a partial image
        inside a full image.

        Args:
            image (Union[bytes, np.ndarray, Image.Image, str]):
                Partial image (or path to it) to look for inside the full image.
            full_image (Union[bytes, np.ndarray, Image.Image, str], optional):
                Full image (or path to it). Defaults to None, in which case the
                current screenshot is used.
            threshold (float, optional):
                Minimum match score for a match to be accepted. Defaults to 0.7.

        Usages:
            app.get_image_coordinates('path/to/partial_image.png', 'path/to/full_image.png')
            app.get_image_coordinates('path/to/partial_image.png', threshold=0.8)

        Returns:
            Union[Tuple[int, int, int, int], None]:
                Tuple (x1, y1, x2, y2) of the best match, or None if no match
                was found.

        Note:
            Retried on failure via the `retry` decorator.
        """
        if full_image is None:
            screenshot = self._get_screenshot_as_base64_decoded()
            big_image = self.to_ndarray(image=screenshot, grayscale=True)
        else:
            big_image = self.to_ndarray(image=full_image, grayscale=True)  # load the full image
        small_image = self.to_ndarray(image=image, grayscale=True)  # load the partial image
        # Match the partial image against the screenshot
        max_val_loc = self._multi_scale_matching(full_image=big_image, template_image=small_image,
                                                 threshold=threshold)
        if max_val_loc is None:
            return None
        max_val, max_loc = max_val_loc
        if not max_val >= threshold:  # best score below threshold -> no match
            self.logger.error("find_coordinates_by_image(): Совпадений не найдено")
            return None
        # Compute top-left and bottom-right corners of the found match
        left = int(max_loc[0])
        top = int(max_loc[1])
        width = small_image.shape[1]
        height = small_image.shape[0]
        right = left + width
        bottom = top + height
        return int(left), int(top), int(right), int(bottom)  # coordinates of the most likely match

    @helpers_decorators.retry
    def get_inner_image_coordinates(self,
                                    outer_image_path: Union[bytes, np.ndarray, Image.Image, str],
                                    inner_image_path: Union[bytes, np.ndarray, Image.Image, str],
                                    threshold: float = 0.9) -> Union[Tuple[int, int, int, int], None]:
        """
        Finds an image on screen and, inside it, a second (inner) image.

        Args:
            outer_image_path (Union[bytes, np.ndarray, Image.Image, str]):
                Outer image (or path to it) to find on screen.
            inner_image_path (Union[bytes, np.ndarray, Image.Image, str]):
                Inner image (or path to it) to find inside the outer image.
            threshold (float, optional):
                Similarity threshold for template matching. Defaults to 0.9.

        Usages:
            app.get_inner_image_coordinates('path/to/outer_image.png', 'path/to/inner_image.png')
            app.get_inner_image_coordinates('path/to/outer_image.png', 'path/to/inner_image.png', threshold=0.8)

        Returns:
            Union[Tuple[int, int, int, int], None]:
                Inner-image coordinates relative to the screen as (x1, y1, x2, y2),
                or None if the inner image was not found.

        Note:
            Retried up to 3 times on failure.
        """
        # Device screen resolution
        screen_width, screen_height = self.terminal.get_screen_resolution()
        # Capture a screenshot
        screenshot = base64.b64decode(self.driver.get_screenshot_as_base64())
        # Read the screenshot
        full_image = self.to_ndarray(image=screenshot, grayscale=True)
        # Read the outer image
        outer_image = self.to_ndarray(image=outer_image_path, grayscale=True)
        # Read the inner image
        inner_image = self.to_ndarray(image=inner_image_path, grayscale=True)
        # Scale factors between the physical screen and the screenshot
        width_ratio = screen_width / full_image.shape[1]
        height_ratio = screen_height / full_image.shape[0]
        # Rescale both templates to screenshot scale
        inner_image = cv2.resize(inner_image, None, fx=width_ratio, fy=height_ratio)
        outer_image = cv2.resize(outer_image, None, fx=width_ratio, fy=height_ratio)
        outer_max_val, outer_max_loc = self._multi_scale_matching(full_image=full_image, template_image=outer_image,
                                                                  threshold=threshold)
        # Check whether the outer image's best score clears the threshold
        if outer_max_val >= threshold:
            # Outer image dimensions
            outer_height, outer_width = outer_image.shape
            # Outer image position on screen
            outer_top_left = outer_max_loc
            outer_bottom_right = (outer_top_left[0] + outer_width, outer_top_left[1] + outer_height)
            # Region of interest (ROI) containing the outer image
            outer_roi = full_image[outer_top_left[1]:outer_bottom_right[1], outer_top_left[0]:outer_bottom_right[0]]
            inner_max_val, inner_max_loc = self._multi_scale_matching(full_image=outer_roi, template_image=inner_image,
                                                                      threshold=threshold)
            # Check whether the inner image's best score clears the threshold
            if inner_max_val >= threshold:
                # Inner image dimensions
                inner_height, inner_width = inner_image.shape
                # Inner image position relative to the screen
                inner_top_left = (outer_top_left[0] + inner_max_loc[0], outer_top_left[1] + inner_max_loc[1])
                inner_bottom_right = (inner_top_left[0] + inner_width, inner_top_left[1] + inner_height)
                # Return inner image coordinates relative to the screen
                return inner_top_left + inner_bottom_right
        # Inner image not found
        return None

    def is_image_on_the_screen(self,
                               image: Union[bytes, np.ndarray, Image.Image, str],
                               threshold: float = 0.9) -> bool:
        """
        Checks whether the given image is present on the screen.

        Args:
            image (Union[bytes, np.ndarray, Image.Image, str]): Image to look for
                on screen; bytes, numpy array, PIL Image or file path.
            threshold (float): Similarity threshold between the template and the
                screenshot.

        Returns:
            bool: True if the image is found on screen, otherwise False.

        Raises:
            Never raises; cv2 errors, assertion errors and any other exception
            are logged and reported as False.
        """
        try:
            screenshot = self._get_screenshot_as_base64_decoded()
            # Read the screenshot and the partial image
            full_image = self.to_ndarray(image=screenshot, grayscale=True)
            small_image = self.to_ndarray(image=image, grayscale=True)
            # Sanity check: the template must fit inside the screenshot
            if small_image.shape[0] > full_image.shape[0] or small_image.shape[1] > full_image.shape[1]:
                self.logger.error("Частичное изображение больше снимка экрана.")
                return False
            # Match the partial image against the screenshot
            max_val, max_loc = self._multi_scale_matching(full_image=full_image, template_image=small_image,
                                                          threshold=threshold)
            return max_val > threshold
        except cv2.error as e:
            self.logger.error(f"is_image_on_the_screen(): {e}")
            return False
        except AssertionError as e:
            self.logger.error(f"is_image_on_the_screen(): {e}")
            return False
        except Exception as e:
            self.logger.error(f"is_image_on_the_screen(): {e}")
            return False

    def _multi_scale_matching(self,
                              full_image: np.ndarray,
                              template_image: np.ndarray,
                              threshold: float = 0.8,
                              return_raw: bool = False):
        """
        Template matching over several scales of `full_image`.

        Args:
            full_image: Grayscale image to search in.
            template_image: Grayscale template to search for.
            threshold: Score above which a scale is accepted.
            return_raw: If True, return the raw `cv2.matchTemplate` result of
                the first scale that clears the threshold (or None).

        Returns:
            With return_raw=False: (max_val, max_loc) with max_loc converted
            back to the original scale, or (0, (0, 0)) if nothing matched.
            With return_raw=True: the raw result matrix, or None if nothing
            matched. NOTE(review): the raw matrix is in the *resized* image's
            coordinate space, not the original's — confirm callers account
            for this.
        """
        origin_width, origin_height = template_image.shape[::-1]  # original template size
        # Iterate over scales, including >1.0 to "stretch" the image
        for scale in np.concatenate([np.linspace(0.2, 1.0, 10)[::-1], np.linspace(1.1, 2.0, 10)]):
            # Resize the image for this scale
            resized = cv2.resize(full_image, (int(full_image.shape[1] * scale), int(full_image.shape[0] * scale)))
            # Skip scales where the image becomes smaller than the template
            if resized.shape[0] < origin_height or resized.shape[1] < origin_width:
                continue
            # Template matching
            result = cv2.matchTemplate(resized, template_image, cv2.TM_CCOEFF_NORMED)
            _, max_val, _, max_loc = cv2.minMaxLoc(result)
            if max_val > threshold:
                if return_raw:
                    return result
                # Convert coordinates back to the original scale
                max_loc_original = (int(max_loc[0] / scale), int(max_loc[1] / scale))
                return max_val, max_loc_original
        if return_raw:
            return None
        return 0, (0, 0)

    def is_text_on_ocr_screen(self,
                              text: str,
                              screen: Union[bytes, np.ndarray, Image.Image, str] = None,
                              language: str = 'rus') -> bool:
        """
        Checks whether the given text is present on the screen.

        Text recognition is done with pytesseract.

        Args:
            text (str): Text to look for on the screen.
            screen (bytes, optional): Screenshot as bytes. If not given, a
                screenshot is captured via `self.driver`.
            language (str): OCR language. Defaults to 'rus'.

        Returns:
            bool: True if the text is found on screen, otherwise False.
                Any cv2/tesseract/assertion error is logged and reported as
                False.
        """
        try:
            if screen is None:
                screenshot = self._get_screenshot_as_base64_decoded()
                image = self.to_ndarray(screenshot)
            else:
                image = self.to_ndarray(screen)
            # Binarize the image (Otsu) to help OCR
            _, image_bin = cv2.threshold(image, 0, 255,
                                         cv2.THRESH_BINARY | cv2.THRESH_OTSU)
            # Convert the binary image to text
            custom_config = r'--oem 3 --psm 6'
            ocr_text = pytesseract.image_to_string(image_bin, lang=language, config=custom_config)
            # Case-insensitive substring check against the recognized text
            return text.lower() in ocr_text.lower()
        except cv2.error as e:
            self.logger.error(f"is_text_on_ocr_screen(): {e}")
            return False
        except pytesseract.TesseractError as e:
            self.logger.error(f"is_text_on_ocr_screen(): {e}")
            return False
        except AssertionError as e:
            self.logger.error(f"is_text_on_ocr_screen(): {e}")
            return False
        except Exception as e:
            self.logger.error(f"is_text_on_ocr_screen(): {e}")
            return False

    @helpers_decorators.retry
    def get_many_coordinates_of_image(self,
                                      image: Union[bytes, np.ndarray, Image.Image, str],
                                      full_image: Union[bytes, np.ndarray, Image.Image, str] = None,
                                      cv_threshold: float = 0.7,
                                      coord_threshold: int = 5) -> Union[List[Tuple], None]:
        """
        Finds every occurrence of a partial image inside a full image.

        Args:
            image (Union[bytes, np.ndarray, Image.Image, str]):
                Partial image (or path to it) to look for inside the full image.
            full_image (Union[bytes, np.ndarray, Image.Image, str], optional):
                Full image (or path to it). Defaults to None, in which case the
                current screenshot is used.
            cv_threshold (float, optional):
                Minimum match score for a match to be accepted. Defaults to 0.7.
            coord_threshold (int, optional):
                Maximum difference between the x and y values of two matches for
                them to be considered duplicates. Defaults to 5 pixels.

        Usages:
            app.get_many_coordinates_of_image('path/to/partial_image.png', 'path/to/full_image.png')
            app.get_many_coordinates_of_image('path/to/partial_image.png', cv_threshold=0.8, coord_threshold=10)

        Returns:
            Union[List[Tuple], None]:
                List of tuples with the location of each match as (x1, y1, x2, y2),
                or None if no matches were found.

        Note:
            Retried up to three times on failure.
        """
        if full_image is None:
            screenshot = self._get_screenshot_as_base64_decoded()
            big_image = self.to_ndarray(image=screenshot, grayscale=True)
        else:
            big_image = self.to_ndarray(image=full_image, grayscale=True)  # load the full image
        small_image = self.to_ndarray(image=image, grayscale=True)  # load the partial image
        result = self._multi_scale_matching(full_image=big_image, template_image=small_image,
                                            return_raw=True, threshold=cv_threshold)
        # Bug fix: with return_raw=True the matcher returns None when no scale
        # clears the threshold; the original passed that None into np.where,
        # which raises TypeError. Treat it as "no matches".
        if result is None:
            self.logger.error(f"_find_many_coordinates_by_image() NO MATCHES, {image=}")
            return None
        # All matches above the threshold
        locations = np.where(result >= cv_threshold)
        matches = list(zip(*locations[::-1]))  # convert coordinates to a list of tuples
        # Filter out matches that are too close to one another
        unique_list = []  # unique (deduplicated) matches
        for (x1_coordinate, y1_coordinate) in matches:
            exclude = False
            for (x2_coordinate, y2_coordinate) in unique_list:
                if abs(x1_coordinate - x2_coordinate) <= coord_threshold and abs(
                        y1_coordinate - y2_coordinate) <= coord_threshold:
                    # Both deltas within the threshold -> duplicate of an
                    # already-kept match
                    exclude = True
                    break
            if not exclude:
                unique_list.append((x1_coordinate, y1_coordinate))
        matches = unique_list
        if not matches:
            self.logger.error(f"_find_many_coordinates_by_image() NO MATCHES, {image=}")
            return None
        # Append the bottom-right corner to every match
        matches_with_corners = []
        for match in matches:
            x_coordinate, y_coordinate = match
            width, height = small_image.shape[::-1]
            top_left = (x_coordinate, y_coordinate)
            bottom_right = (x_coordinate + width, y_coordinate + height)
            matches_with_corners.append((top_left + bottom_right))
        return matches_with_corners

    @helpers_decorators.retry
    def get_text_coordinates(
            self,
            text: str,
            image: Union[bytes, str, Image.Image, np.ndarray] = None,
            language: str = 'rus'
    ) -> Optional[tuple[int, ...]]:
        """
        Returns the coordinates of the area containing the given text on the
        provided image or on a screenshot.

        Args:
            text (str): Text to search for.
            image (bytes, str, Image.Image, np.ndarray, optional): Image to
                search in. If not given, a screenshot is used. Defaults to None.
            language (str, optional): OCR language. Defaults to 'rus'.

        Usages:
            app.get_text_coordinates("Hello, world!")
            app.get_text_coordinates("Привет, мир!", language='rus')
            app.get_text_coordinates("Hello, world!", image='path/to/image.png')

        Returns:
            Union[Tuple[int, int, int, int], None]: Coordinates of the text
                area, or None if the text was not found.
        """
        if not image:
            # No image given: use a screenshot
            screenshot = self._get_screenshot_as_base64_decoded()
            image = self.to_ndarray(image=screenshot,
                                    grayscale=True)  # to numpy array, grayscale
        else:
            # Image given: just convert it
            image = self.to_ndarray(image=image,
                                    grayscale=True)  # to numpy array, grayscale
        # Binarize the image (Otsu) to help OCR
        _, threshold = cv2.threshold(image, 0, 255,
                                     cv2.THRESH_BINARY | cv2.THRESH_OTSU)
        # Run OCR via PyTesseract, getting per-word boxes
        data = pytesseract.image_to_data(threshold, lang=language,
                                         output_type=pytesseract.Output.DICT)
        formatted_data = {}
        for i in range(len(data['text'])):
            word_text = data['text'][i]  # the word itself
            left = int(data['left'][i])  # left edge of the word box
            top = int(data['top'][i])  # top edge of the word box
            width = int(data['width'][i])  # box width
            height = int(data['height'][i])  # box height
            coordinates = [left, top, left + width, top + height]  # word box
            if word_text:
                if i not in formatted_data:
                    formatted_data[i] = {}
                formatted_data[i] = {'text': word_text,
                                     'coordinates': coordinates}  # word plus its box
        # Split the wanted text into individual words
        words = text.lower().split(' ')
        # Track the current word sequence and its boxes
        current_sequence = []  # words matched so far
        result_coordinates = []  # their boxes
        for word_data in formatted_data.values():
            word = word_data['text'].lower()  # current word
            coordinates = word_data['coordinates']  # its box
            if word in words:
                current_sequence.append(word)
                result_coordinates.append(coordinates)
            else:
                if current_sequence == words:
                    # Full sequence found: return the bounding box spanning it
                    top_left = tuple(map(int, result_coordinates[0][:2]))  # top-left corner
                    bottom_right = tuple(map(int, result_coordinates[-1][2:]))  # bottom-right corner
                    return top_left + bottom_right
                current_sequence = []  # reset the sequence
                result_coordinates = []  # reset its boxes
        return None

    def draw_by_coordinates(self,
                            image: Union[bytes, str, Image.Image, np.ndarray] = None,
                            coordinates: Tuple[int, int, int, int] = None,
                            top_left: Tuple[int, int] = None,
                            bottom_right: Tuple[int, int] = None,
                            path: str = None) -> bool:
        """
        Draws a rectangle on the provided image, or on a screenshot taken via
        the driver.

        Args:
            image (Union[bytes, str, Image.Image, np.ndarray], optional): Image to draw on. Defaults to None.
            coordinates (Tuple[int, int, int, int], optional): Rectangle as (x1, y1, x2, y2). Defaults to None.
            top_left (Tuple[int, int], optional): Top-left rectangle corner. Defaults to None.
            bottom_right (Tuple[int, int], optional): Bottom-right rectangle corner. Defaults to None.
            path (str, optional): Where to save the image. Defaults to None.

        Usages:
            draw_by_coordinates(image=image_bytes, coordinates=(10, 20, 30, 40), path='path/to/save/image.png')
            draw_by_coordinates(top_left=(10, 20), bottom_right=(30, 40))

        Returns:
            bool: True on success, otherwise False.

        Raises:
            Never raises; WebDriverException and cv2.error are logged and
            reported as False.

        Notes:
            - If no image is given, the current screenshot is used.
            - If top-left/bottom-right are not given, `coordinates` is used.
              NOTE(review): passing neither `coordinates` nor both corners
              raises a TypeError inside the try and is not caught — confirm
              callers always supply one of them.
        """
        try:
            if image is None:
                # No image given: take a screenshot via the driver
                screenshot = self._get_screenshot_as_base64_decoded()
                image = self.to_ndarray(screenshot)
            else:
                image = self.to_ndarray(image)
            # Derive the corners from `coordinates` when they are not given
            if not top_left and not bottom_right:
                top_left = (coordinates[0], coordinates[1])
                bottom_right = (coordinates[2], coordinates[3])
            # Save the image with the rectangle drawn on it
            if path is None:
                path = "screenshot_with_text_coordinates.png"
            path = os.path.join(path)
            cv2.rectangle(image, top_left, bottom_right, (0, 255, 0), 2)
            cv2.imwrite(path, image)
            return True
        except WebDriverException as e:
            # Log WebDriver problems and report failure
            self.logger.error(f'draw_by_coordinates() WebDriverException {e}')
            return False
        except cv2.error as e:
            # Log OpenCV problems and report failure
            self.logger.error(f'draw_by_coordinates() cv2.error: {e}')
            return False

    @staticmethod
    def is_rgb(image: np.ndarray) -> bool:
        """
        Checks whether the image is a 3-channel color (RGB/BGR) image.

        Args:
            image: np.ndarray - input image.

        Returns:
            bool - True if the image has exactly three channels, False otherwise.
        """
        # Bug fix: the original also OR-ed in `image.ndim == 3` (making any
        # 3-D array "RGB", including (h, w, 1)) and `image.ndim == '3'`
        # (comparing an int to a string, always False). Require exactly three
        # channels so to_grayscale() never feeds a non-3-channel array to
        # cv2.cvtColor.
        return image.ndim == 3 and image.shape[2] == 3

    @staticmethod
    def is_grayscale(image: np.ndarray) -> bool:
        """
        Checks whether the image is grayscale.

        Args:
            image: np.ndarray - input image.

        Returns:
            bool - True if the image is grayscale, False otherwise.
        """
        # Bug fix: the original also OR-ed in `image.ndim == '2'` (int vs.
        # string, always False); `len(image.shape)` and `image.ndim` are the
        # same quantity, so a single pair of checks suffices.
        return image.ndim == 2 or (image.ndim == 3 and image.shape[2] == 1)

    def to_grayscale(self, image: np.ndarray) -> np.ndarray:
        """
        Converts an image to grayscale.

        Args:
            image: np.ndarray - input image.

        Returns:
            np.ndarray - grayscale image (unchanged if not 3-channel color).
        """
        # Only 3-channel images need conversion
        if self.is_rgb(image):
            gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
            # Clamp pixel values into the 0..255 range
            gray_image = cv2.convertScaleAbs(gray_image)
            return gray_image
        # Already grayscale (or unsupported layout): return as-is
        return image

    def to_ndarray(self, image: Union[bytes, np.ndarray, Image.Image, str], grayscale: bool = True) -> np.ndarray:
        """
        Converts input of various types into a NumPy ndarray.

        Args:
            image: Union[bytes, np.ndarray, Image.Image, str] - image as
                bytes, ndarray, PIL Image or file path.
            grayscale: convert the result to grayscale (default True).

        Returns:
            np.ndarray - the image as a NumPy array.
        """
        # Bytes: decode through OpenCV
        if isinstance(image, bytes):
            image = cv2.imdecode(np.frombuffer(image, np.uint8), cv2.IMREAD_COLOR)
        # File path: read through OpenCV
        if isinstance(image, str):
            image = cv2.imread(image, cv2.IMREAD_COLOR)
        # PIL Image: convert directly
        if isinstance(image, Image.Image):
            image = np.array(image)
        # Optionally convert to grayscale before returning
        if grayscale:
            return self.to_grayscale(image=image)
        return image

    def save_screenshot(self, path: str = '', filename: str = 'screenshot.png') -> bool:
        """
        Saves a screenshot of the screen to the given file.

        Args:
            path (str, optional): Directory to save the screenshot in.
                Defaults to '', i.e. the current directory.
            filename (str, optional): File name for the screenshot.
                Defaults to 'screenshot.png'.

        Usages:
            save_screenshot(path='/path/to/save', filename='my_screenshot.png')
            save_screenshot(filename='another_screenshot.png')
            save_screenshot()

        Returns:
            bool: True if the screenshot was saved, otherwise False.

        Raises:
            Never raises; any error is logged and reported as False.
        """
        try:
            screenshot = self._get_screenshot_as_base64_decoded()
            path_to_file = os.path.join(path, filename)
            with open(path_to_file, "wb") as f:
                f.write(screenshot)
            return True
        except Exception as error:
            self.logger.error(f"Не удалось сохранить скриншот: {error=}")
            return False

    def _get_screenshot_as_base64_decoded(self) -> bytes:
        """
        Takes a screenshot, Base64-encodes it and decodes it back into bytes.

        Usages:
            screenshot_bytes = self._get_screenshot_as_base64_decoded()

        Returns:
            bytes: Decoded screenshot bytes (PNG).

        Raises:
            WebDriverException: If the screenshot cannot be taken (logged and
                re-raised).

        Notes:
            - Internal helper used by the other methods of this class.
        """
        try:
            screenshot = self.driver.get_screenshot_as_base64().encode('utf-8')
            screenshot = base64.b64decode(screenshot)
            return screenshot
        except WebDriverException as e:
            self.logger.error(f"Failed to get screenshot: {e}")
            raise
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_helpers/appium_image.py
|
appium_image.py
|
import logging
import os
from typing import Tuple, Dict, Union, List, Optional, Any
from appium.webdriver import WebElement
from selenium.common.exceptions import WebDriverException
from appium_extended_helpers.appium_image import AppiumImage
class AppiumHelpers(AppiumImage):
    """
    Helper mix-in that resolves the different locator flavours used by the
    library (WebElement, Dict[str, str] attribute maps, image-path strings)
    into concrete WebElement(s) found on the current screen.
    """

    def __init__(self, driver, logger: logging.Logger = None):
        super().__init__(driver=driver, logger=logger)

    @staticmethod
    def handle_webelement_locator(locator, timeout: int,
                                  elements_range=None,
                                  contains: bool = False) -> Union[WebElement, None]:
        """
        Handle a locator that is already a WebElement: return it unchanged.

        Args:
            locator: A WebElement locator.
            timeout: Wait timeout in seconds (ignored).
            elements_range: Search range (ignored).
            contains: Partial-match flag (ignored).

        Returns:
            Union[WebElement, None]: The WebElement that was passed in.
        """
        return locator

    def handle_dict_locator(self,
                            locator,
                            timeout: int = 10,
                            elements_range=None,
                            contains: bool = False) -> Union[Tuple, None]:
        """
        Build an xpath locator tuple from a Dict[str, str] locator.

        The xpath starts from the 'class' key when present (otherwise
        matches any node) and adds one predicate per dict entry.

        Args:
            locator: Attribute dictionary, Dict[str, str].
            timeout: Wait timeout in seconds (ignored).
            elements_range: Search range (ignored).
            contains: When True, build ``contains(@attr, value)`` predicates
                instead of exact ``@attr='value'`` matches.

        Returns:
            Union[Tuple, None]: ("xpath", <expression>) tuple, or None on error.

        Note:
            NOTE(review): when 'class' is present it also appears again as a
            predicate, producing e.g. ``//X[@class='X']`` — redundant but
            harmless.
        """
        if 'class' not in locator:
            xpath = "//*"
        else:
            xpath = "//" + locator['class']
        try:
            if contains:
                for attr, value in locator.items():
                    xpath += f"[contains(@{attr}, '{value}')]"
                new_locator = ("xpath", xpath)
                return new_locator
            for attr, value in locator.items():
                xpath += f"[@{attr}='{value}']"
            new_locator = ("xpath", xpath)
            return new_locator
        except KeyError as e:
            self.logger.error(f"Ошибка dict: {locator}")
            self.logger.error(f"{str(e)}")
            return None

    def handle_string_locator(self,
                              locator,
                              timeout: int,
                              elements_range: Union[dict, list, tuple] = None,
                              contains: bool = False
                              ) -> Union[WebElement, None]:
        """
        Handle a string locator, treating it as a path to an image file.

        Finds the image fragment on the current screen and returns the
        on-page element closest to the match coordinates.

        Args:
            locator: Path to the image file to look for.
            timeout: Wait timeout in seconds.
            elements_range: Range of elements to search among (dict locator,
                explicit element list, or xpath tuple). When omitted, all
                elements on the page ("//*") are considered.
            contains: Partial-match flag (unused here).

        Returns:
            Union[WebElement, None]: The closest WebElement, or None when the
            image is not on screen.
        """
        if not self.is_image_on_the_screen(image=locator):
            return None
        # Locate the image fragment's coordinates on the current screen.
        screenshot = self._get_screenshot_as_base64_decoded()
        full_image = self.to_ndarray(screenshot)
        max_loc = self.get_image_coordinates(full_image=full_image, image=locator)
        x = max_loc[0]
        y = max_loc[1]
        # Determine the list of candidate elements.
        elements = None
        locator = ("xpath", "//*")
        if isinstance(elements_range, dict):
            locator = self.handle_dict_locator(elements_range)
        elif isinstance(elements_range, list):
            elements = elements_range
        elif isinstance(elements_range, tuple):
            locator = elements_range
        if not elements:
            elements = self.driver.find_elements(*locator)
        # Pick the element closest to the matched coordinates.
        element = self.get_closest_element_to_point(x=x, y=y, elements=elements)
        if not element:
            self.logger.error(f"Элемент не обнаружен\n"
                              f"{locator=}\n"
                              f"{elements_range=}\n")
        return element

    @staticmethod
    def get_closest_element_to_point(x, y, elements,
                                     ) -> Union[WebElement, None]:
        """
        Find the element whose top-left corner is closest to a screen point.

        Args:
            x (int): X coordinate.
            y (int): Y coordinate.
            elements: List of elements to search.

        Returns:
            Optional[WebElement]: The closest element, or None if the list
            is empty.
        """
        closest_element = None
        closest_distance = float("inf")
        for element in elements:
            # 'bounds' has the form "[l,t][r,b]"; only left/top are used.
            left, top, _, _ = map(int,
                                  element.get_attribute('bounds').strip("[]").replace("][", ",").split(","))
            distance = ((x - left) ** 2 + (y - top) ** 2) ** 0.5  # Euclidean distance formula
            if distance < closest_distance:  # and left <= x and top <= y:
                closest_distance = distance
                closest_element = element
        return closest_element

    def handle_webelement_locator_elements(self,
                                           locator: List[WebElement],
                                           timeout: int,
                                           elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
                                           contains: bool = True) -> \
            Union[List[WebElement], None]:
        """
        Handle a locator that is already a list of WebElements: validate and
        return it unchanged.

        Args:
            locator: List of WebElements.
            timeout: Wait timeout in seconds (ignored).
            elements_range: Search range (ignored).
            contains: Partial-match flag (ignored).

        Returns:
            Union[List[WebElement], None]: The input list, or None when the
            first entry is not a WebElement.
        """
        if not isinstance(locator[0], WebElement):
            self.logger.error(f"Элементы списка не WebElement\n"
                              f"{locator=}\n"
                              f"{timeout=}\n\n")
            self.logger.error("ERROR in handle_webelement_locator_elements()")
            return None
        return locator

    def handle_dict_locator_elements(self,
                                     locator: Dict[str, str],
                                     timeout: int = 10,
                                     elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
                                     contains: bool = True) -> \
            Optional[Tuple[str, str]]:
        """
        Build an xpath locator tuple from a Dict[str, str] locator (plural
        variant). XPATH itself must not be supplied as a key.

        Args:
            locator: Attribute dictionary, Dict[str, str].
            timeout: Wait timeout in seconds (ignored).
            elements_range: Search range (ignored).
            contains: When True, build ``contains(@attr, value)`` predicates.

        Returns:
            Optional[Tuple[str, str]]: ("xpath", <expression>), or None on error.
        """
        if 'class' not in locator:
            xpath = "//*"
        else:
            xpath = "//" + locator['class']
        try:
            if contains:
                for attr, value in locator.items():
                    xpath += f"[contains(@{attr}, '{value}')]"
                new_locator = ("xpath", xpath)
                return new_locator
            for attr, value in locator.items():
                xpath += f"[@{attr} = '{value}']"
            new_locator = ("xpath", xpath)
            return new_locator
        except KeyError as e:
            self.logger.error(f"Ошибка dict: {locator}")
            self.logger.error(f"{str(e)}")
            return None

    def handle_string_locator_elements(self,  # FIXME optimize using xpath and/or the xml tree
                                       locator: str,
                                       timeout: int = 10,
                                       elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
                                       cv_threshold: float = 0.7,  # weights are calibrated
                                       coord_threshold: int = 1,
                                       contains: bool = True) -> \
            Union[List, None]:
        """
        Handle a string locator, treating it as a path to an image file, and
        return every on-page element matching the image.

        Using an explicit ``elements_range`` is strongly recommended.

        Args:
            locator: Path to the image file.
            timeout: Maximum wait for elements, in seconds.
            elements_range: Range of elements to search among.
            cv_threshold: OpenCV match-confidence threshold.
            coord_threshold: Minimum pixel distance between distinct matches.
            contains: Partial-match flag (unused here).

        Returns:
            Union[List, None]: Matched elements (de-duplicated, parents
            removed, sorted by top coordinate), or None when nothing matched.

        Note:
            NOTE(review): 'full_image.png' is leaked when no matches are
            found — the early return precedes ``os.remove``.
        """
        # Save a screenshot to disk and search it for matching coordinates.
        with open('full_image.png', 'wb') as file:
            file.write(self.driver.get_screenshot_as_png())
        max_locs = self.get_many_coordinates_of_image(full_image='full_image.png',
                                                      image=locator,
                                                      cv_threshold=cv_threshold,
                                                      coord_threshold=coord_threshold)
        if not max_locs:
            self.logger.error("Элементы не обнаружены")
            return None
        os.remove('full_image.png')
        # Determine the list of candidate elements.
        elements = None
        locator = ("xpath", "//*")
        if isinstance(elements_range, dict):
            locator = self.handle_dict_locator(elements_range)
        elif isinstance(elements_range, list):
            elements = elements_range
        elif isinstance(elements_range, tuple):
            locator = elements_range
        if not elements:
            elements = self.driver.find_elements(*locator)
        # Find the element closest to each matched coordinate pair.
        result = []
        for max_loc in max_locs:
            x = max_loc[0]
            y = max_loc[1]
            element = self.get_closest_element_to_point(x=x, y=y, elements=elements)
            result.append(element)
        # Drop parent (enclosing) elements.
        result = self.remove_nesting(result)
        # Sort by on-screen coordinates.
        result = self.sort_elements_by_bounds(result)
        # Debug dump to folder. TODO remove this fragment after testing.
        for index, element in enumerate(result):
            file_path = os.path.join('core', 'appium', 'unit_test', 'str_elements', f'screenshot_{index}.png')
            with open(file_path, 'wb') as file:
                file.write(element.screenshot_as_png)
        return result

    def add_bounds(self, elements: List[WebElement]) -> list[list[Union[WebElement, Any]]]:
        """
        Attach bounds coordinates to each element.

        Returns a list of ``[element, left, top, right, bottom]`` records;
        elements whose bounds cannot be read are skipped (error logged).
        """
        elements_with_bounds = []
        for element in elements:
            try:
                coord = element.get_attribute("bounds")
                left, top, right, bottom = map(int, coord[1:-1].replace("][", ",").split(','))
                elements_with_bounds.append([element, left, top, right, bottom])
            except WebDriverException as e:
                self.logger.error(f"Error sorting elements: {str(e)}")
        return elements_with_bounds

    @staticmethod
    def remove_bounds(elements_with_bounds):
        """
        Strip the coordinates added by ``add_bounds``, returning bare
        elements. Only meaningful when paired with ``add_bounds``.
        """
        elements_without_bounds = [lst[0] for lst in elements_with_bounds]
        return elements_without_bounds

    def sort_elements_by_bounds(self, elements: List[WebElement], desc: bool = False) -> Optional[List[WebElement]]:
        """
        Sort a list of WebElements by their top (y) coordinate.

        Args:
            elements (List[WebElement]): WebElement objects to sort.
            desc (bool): When True, sort in descending order; the default
                False sorts ascending by top coordinate.

        Returns:
            Optional[List[WebElement]]: The sorted list, or None for invalid
            input.

        Usage:
            elements = driver.find_elements_by_xpath("//div[@class='my-class']")
            sorted_elements = sort_elements_by_bounds(elements, desc=False)
        """
        if not elements or not isinstance(elements, list) or not isinstance(elements[0], WebElement):
            self.logger.error(f"Список невозможно сортировать, {elements=}")
            return None
        elements_with_coords = self.add_bounds(elements)
        # Index 2 of each record is the 'top' coordinate.
        sorted_elements = sorted(elements_with_coords, key=lambda x: x[2], reverse=desc)
        result = self.remove_bounds(sorted_elements)
        return result

    def remove_nesting(self, elements) -> List[WebElement]:  # FIXME implement via XPath axes, not coordinates
        """
        Detect elements that strictly enclose other elements (by bounds) and
        remove the larger (parent) ones, keeping only leaf-most elements.
        """
        # Drop duplicates.
        elements = list(set(elements))
        # Attach bounds.
        elements_with_coords = self.add_bounds(elements)
        # Mark elements that strictly contain another element (coordinate
        # containment only, no overlap handling).
        for index, el1 in enumerate(elements_with_coords):
            # Screen origin (0, 0) is the top-left corner.
            el1_left_top_x = el1[1]
            el1_left_top_y = el1[2]
            el1_right_bottom_x = el1[3]
            el1_right_boot_y = el1[4]
            for el2 in elements_with_coords:
                el2_left_top_x = el2[1]
                el2_left_top_y = el2[2]
                el2_right_bottom_x = el2[3]
                el2_right_boot_y = el2[4]
                # Strict inequalities, so an element never "contains" itself.
                if el1_left_top_x < el2_left_top_x and el1_left_top_y < el2_left_top_y and el1_right_bottom_x > \
                        el2_right_bottom_x and el1_right_boot_y > el2_right_boot_y:
                    elements_with_coords[index] = [el1[0], el1[1], el1[2], el1[3], el1[4], 'parent']  # marker
        elements_no_parent = []
        # Keep records without the 'parent' marker (length < 6).
        for element in elements_with_coords:
            if len(element) < 6:
                elements_no_parent.append(element)
        # Strip coordinates, back to a plain element list.
        result = self.remove_bounds(elements_no_parent)
        return result

    def find_only_children(self, element, elements):
        # NOTE(review): unimplemented stub — always returns None.
        return
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_helpers/appium_helpers.py
|
appium_helpers.py
|
import base64
import sys
import io
import time
import functools
import traceback
from functools import wraps
from datetime import datetime
import allure
import numpy as np
import pytest
from PIL import Image
# TODO make unit test for module
def retry(func):
    """
    Retry decorator: re-invokes the wrapped callable while it returns
    None or False.

    Up to 3 attempts are made (hard-coded), sleeping one second after each
    failed attempt; the last result is returned if every attempt fails.
    """
    attempts = 3

    @wraps(func)
    def wrapper(*args, **kwargs):
        outcome = None
        attempt = 0
        while attempt < attempts:
            outcome = func(*args, **kwargs)
            # Anything except None/False counts as success (0 and "" pass).
            if outcome is not None and outcome is not False:
                return outcome
            time.sleep(1)
            attempt += 1
        return outcome

    # Return the wrapper.
    return wrapper
def wait_until_window_change(poll_frequency: float = 0.1):
    """
    Decorator factory that waits until the screen content stops changing.

    The wrapped method may receive a ``decorator_args`` dict with:
        timeout_window: overall waiting time, seconds (default 30);
        window_not_changing_period: period during which the screen must stay
            unchanged, seconds (default 10).

    Args:
        poll_frequency (float): Initial polling interval in seconds
            (doubled after each unchanged-period attempt). Default 0.1.

    Returns:
        function: The decorator.
    """
    def func_decorator(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            """
            Call the decorated method, then poll a cropped grayscale
            screenshot region until it stays constant for the configured
            period.

            Returns:
                bool: True if the screen stayed unchanged for the whole
                period before the overall timeout; otherwise False.
            """
            # Initialisation.
            func_result = False
            decorator_args = kwargs.get('decorator_args', {})
            timeout_window = decorator_args.get('timeout_window', 30)
            window_not_changing_period = decorator_args.get('window_not_changing_period', 10)
            # Record the start time.
            start_time = time.time()
            # Run the decorated function and keep its result.
            func_result = func(self, *args,
                               **kwargs)
            # Screen-change detection with exponential back-off.
            poll_interval = poll_frequency
            # Keep going until the overall timeout is reached.
            while time.time() - start_time < timeout_window:
                # Start a new "window must not change" period.
                window_not_changing_period_start_time = time.time()
                # Tracks whether the window changed during the period.
                window_not_changed = True
                while (time.time() - window_not_changing_period_start_time
                       < window_not_changing_period):
                    # Take a screenshot into memory.
                    image_bytes = self.driver.get_screenshot_as_png()
                    # Convert to grayscale.
                    image = Image.open(io.BytesIO(image_bytes)).convert('L')
                    # Crop to a fixed region (left, top, right, bottom).
                    box = (50, 50, 400, 400)
                    image = image.crop(box)
                    # Wait the configured polling interval.
                    time.sleep(poll_interval)
                    new_image_bytes = self.driver.get_screenshot_as_png()
                    # Convert to grayscale.
                    new_image = Image.open(io.BytesIO(new_image_bytes)).convert('L')
                    new_image = new_image.crop(box)
                    # Compare total pixel sums of the two crops.
                    if np.sum(image) != np.sum(new_image):
                        # The window content changed.
                        window_not_changed = False
                        break
                if window_not_changed:
                    self.logger.debug("Содержимое окна не изменялось в течение периода")
                    return True
                # Double the polling interval for the next period.
                poll_interval *= 2
            if not func_result:
                self.logger.info(f"{func.__name__}() > {func_result}. Изменение экрана: False")
            return False
        # Return the wrapper.
        return wrapper
    # Return the decorator.
    return func_decorator
def wait_for_window_change(poll_frequency: float = 0.5):
    """
    Decorator factory that waits for the screen to CHANGE after the
    decorated method runs; the method is retried if no change is seen.

    The wrapped method may receive a ``decorator_args`` dict with:
        timeout_window: waiting time per attempt, seconds (default 10);
        tries: number of attempts (default 3).

    Args:
        poll_frequency (float): Initial polling interval in seconds
            (doubled after each poll). Default 0.5.

    Returns:
        function: The decorator.
    """
    def func_decorator(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            """
            Take a baseline screenshot, run the decorated function, then
            poll until the screen differs from the baseline.

            Returns:
                bool: True when a screen change was detected; False when no
                change was seen within all attempts.
            """
            # Initialisation.
            func_result = False
            decorator_args = kwargs.get('decorator_args', {})
            timeout_window = decorator_args.get('timeout_window', 10)
            tries = decorator_args.get('tries', 3)
            # Take a baseline screenshot into memory.
            image_bytes = self.driver.get_screenshot_as_png()
            # Open from bytes and convert to grayscale.
            image = Image.open(io.BytesIO(image_bytes)).convert(
                'L')
            # Crop to a fixed region (left, top, right, bottom).
            box = (50, 50, 400, 400)
            # Crop the baseline image.
            image = image.crop(box)
            # Try to detect a screen change for up to `tries` attempts.
            for _ in range(tries):
                # Record the attempt's start time.
                start_time = time.time()
                # Run the decorated function and keep its result.
                func_result = func(self, *args,
                                   **kwargs)
                # Screen-change detection with exponential back-off.
                poll_interval = poll_frequency
                # Poll while within this attempt's timeout window.
                while time.time() - start_time < timeout_window:
                    # Pause for the polling interval.
                    time.sleep(poll_interval)
                    # Take a fresh screenshot.
                    new_image_bytes = self.driver.get_screenshot_as_png()
                    # Open from bytes and convert to grayscale.
                    new_image = Image.open(io.BytesIO(new_image_bytes)).convert(
                        'L')
                    # Crop the fresh image to the same region.
                    new_image = new_image.crop(
                        box)
                    # Compare total pixel sums of baseline and fresh crops.
                    if not np.sum(image) == np.sum(
                            new_image):
                        # A screen change occurred.
                        self.logger.debug(
                            "Изменение экрана обнаружено")
                        # Report the detected change.
                        return True
                    # Double the polling interval for the next poll
                    # (exponential back-off).
                    poll_interval *= 2
            if not func_result:
                # No screen change was detected.
                self.logger.info(
                    f"{func.__name__}() > {func_result}. Изменение экрана: False")
            # Report that no change was detected.
            return False
        # Return the wrapper.
        return wrapper
    # Return the decorator.
    return func_decorator
def time_it(func):
    """
    Decorator that measures and prints the wall-clock execution time of the
    wrapped callable.

    The wrapped callable's return value is passed through unchanged.

    Args:
        func: The callable to time.

    Returns:
        function: The timing wrapper.
    """
    # Fix: preserve the wrapped function's metadata (__name__, __doc__),
    # consistent with `retry` and `log_debug` in this module.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start_time = time.time()
        result = func(*args, **kwargs)
        execution_time = time.time() - start_time
        print(f"Execution time of {func.__name__}: {execution_time:.2f} seconds")
        return result
    # Return the wrapper.
    return wrapper
def step_info(my_str):
    """
    Decorator factory that logs ``my_str`` via logger.info and wraps the
    method in an ``@allure.step`` with the same title.

    On failure it attaches before/after screenshots, a screen recording and
    the error text to the Allure report, then fails the test.

    Args:
        my_str (str): Text used for both logger.info and @allure.step.

    Returns:
        function: The decorator.

    Example:
        @step_info("My step")
        def my_function():
            ...
    """
    # The decorator itself.
    def func_decorator(func):
        # NOTE(review): no functools.wraps here — @allure.step wraps the
        # function instead; confirm metadata handling is acceptable.
        @allure.step(my_str)
        def wrapper(self, *args, **kwargs):
            result = None
            # Log before invoking the method.
            self.logger.info(my_str)
            # Screenshot taken before the call.
            screenshot = self.driver.get_screenshot_as_png()
            # Timestamp for the screenshot name.
            timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
            # Name of the "before" screenshot.
            screenshot_name_begin = f"screenshot_begin_{timestamp}.png"
            # Name of the screen-recording file.
            video_filename = f'screenrecord_{timestamp}.mp4'
            self.driver.start_recording_screen()
            try:
                # Run the wrapped method.
                result = func(self, *args, **kwargs)
            except Exception as error:
                # Attach the "before" screenshot to the report.
                allure.attach(screenshot,
                              name=screenshot_name_begin,
                              attachment_type=allure.attachment_type.PNG)
                # Screenshot taken after the failure.
                screenshot = self.driver.get_screenshot_as_png()
                # Fresh timestamp for the "after" screenshot name.
                timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
                screenshot_name_end = f"screenshot_end_{timestamp}.png"
                # Attach the "after" screenshot.
                allure.attach(screenshot,
                              name=screenshot_name_end,
                              attachment_type=allure.attachment_type.PNG)
                # Attach the screen recording.
                allure.attach(base64.b64decode(self.driver.stop_recording_screen()),
                              name=video_filename,
                              attachment_type=allure.attachment_type.MP4)
                # Attach the error text.
                allure.attach(str(error),
                              name=str(error),
                              attachment_type=allure.attachment_type.TEXT)
                # Log the failure.
                self.logger.error(f"{my_str} [не выполнено]")
                traceback_info = "".join(traceback.format_tb(sys.exc_info()[2]))
                error_msg = f"""Ошибка: {error},
{args=},
{kwargs=},
Traceback:
{traceback_info=}
"""
                self.logger.error(error_msg)
                # Mark the test as failed.
                # NOTE(review): pytest.fail raises an exception derived from
                # BaseException, which `except Exception` does not catch —
                # confirm the fallback branch is reachable at all.
                try:
                    pytest.fail(f"{func.__name__}({args}, {kwargs}), {error}")
                except Exception as e:
                    self.logger.error("Pytest не обнаружен")
                # Re-raise the original error.
                raise
            # Log success.
            self.logger.info(f"{my_str} [выполнено успешно]")
            # Return the wrapped method's result.
            return result
        # Return the wrapper.
        return wrapper
    # Return the decorator.
    return func_decorator
def screenshots():
    """
    Decorator factory: when the wrapped method raises AssertionError, attach
    the "before" screenshot and the error text to the Allure report; in every
    case attach an "after" screenshot (taken in the ``finally`` block).

    Returns:
        function: The decorator.
    """
    # The decorator itself.
    def func_decorator(func):
        # Preserve the wrapped function's metadata.
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            # Screenshot taken before the call.
            screenshot = self.driver.get_screenshot_as_png()
            # Timestamp for the screenshot name.
            timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
            # Name of the "before" screenshot.
            screenshot_name_begin = f"screenshot_begin_{timestamp}.png"
            try:
                # Run the wrapped method.
                result = func(self, *args, **kwargs)
            except AssertionError as error:
                # Attach the "before" screenshot to the report.
                allure.attach(screenshot,
                              name=screenshot_name_begin,
                              attachment_type=allure.attachment_type.PNG)
                # Attach the AssertionError text.
                allure.attach(str(error),
                              name="AssertionError",
                              attachment_type=allure.attachment_type.TEXT)
                # Re-raise AssertionError with the original traceback.
                raise AssertionError(str(error)).with_traceback(sys.exc_info()[2])
            finally:
                # Screenshot taken after the call (success or failure).
                screenshot = self.driver.get_screenshot_as_png()
                # Fresh timestamp for the "after" screenshot name.
                timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
                screenshot_name_end = f"screenshot_end_{timestamp}.png"
                # Attach the "after" screenshot.
                allure.attach(screenshot,
                              name=screenshot_name_end,
                              attachment_type=allure.attachment_type.PNG)
            # Return the wrapped method's result (success path only).
            return result
        # Return the wrapper.
        return wrapper
    # Return the decorator.
    return func_decorator
def log_debug():
    """
    Decorator factory that logs entry (with arguments) and, for truthy
    results, exit of the wrapped method via ``self.logger.debug``.
    """
    def func_decorator(func):
        # Keep the wrapped function's metadata intact.
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            name = func.__name__
            positional = ', '.join(map(str, args))
            keyword = ', '.join(f'{k}={v}' for k, v in kwargs.items())
            # Log the call together with its arguments.
            self.logger.debug(f"{name}() < {positional}, {keyword}")
            result = func(self, *args, **kwargs)
            # Only truthy results are echoed to the log.
            if result:
                self.logger.debug(f"{name}() > {str(result)}")
            return result
        return wrapper
    return func_decorator
def print_me():
    """
    Decorator factory that prints entry (with arguments) and, for truthy
    results, exit of the wrapped callable.

    Returns:
        function: The decorator.
    """
    # The decorator itself.
    def func_decorator(func):
        # Fix: preserve the wrapped function's metadata, consistent with
        # `log_debug` and `retry` in this module.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Name of the wrapped callable.
            method_name = func.__name__
            # Print the call together with its arguments.
            print(f"{method_name}() < {', '.join(map(str, args))}, "
                  f"{', '.join(f'{k}={v}' for k, v in kwargs.items())}")
            # Run the wrapped callable.
            result = func(*args, **kwargs)
            # Only truthy results are printed.
            if result:
                print(f"{method_name}() > {str(result)}")
            # Return the wrapped callable's result.
            return result
        # Return the wrapper.
        return wrapper
    # Return the decorator.
    return func_decorator
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_helpers/helpers_decorators.py
|
helpers_decorators.py
|
import subprocess
import time
import logging
import requests
class AppiumServer:
    """
    Manages a local Appium server process: start, liveness check over HTTP,
    stop, and wait-until-ready polling.
    """

    def __init__(self, server_ip: str = "127.0.0.1", server_port: int = 4723, logger: logging.Logger = None,
                 remote_log_level: str = 'error'):
        """
        Args:
            server_ip: Interface address the server binds to.
            server_port: TCP port the server listens on.
            logger: Logger instance; when None a module-level logger is used.
            remote_log_level: ``--log-level`` value passed to the appium CLI.
        """
        self.server_ip = server_ip
        self.server_port = server_port
        self.remote_log_level = remote_log_level
        # Bug fix: previously a missing logger stayed None and every method
        # crashed with AttributeError on self.logger.info/error/warning.
        self.logger = logger if logger is not None else logging.getLogger(__name__)

    def start(self) -> bool:
        """
        Launch the Appium server according to the configured parameters.

        Shell command used:
        'appium server -ka 800 --log-level {log_level} --log logs/appium_log.txt --log-timestamp
        --use-plugins=device-farm,appium-dashboard -p {port} -a {ip} -pa /wd/hub
        --plugin-device-farm-platform=android --allow-insecure=adb_shell'

        Returns:
            bool: True if the process was spawned, False on failure.
        """
        self.logger.info("Start Appium server")
        cmd = f'appium server -ka 800 --log-level {self.remote_log_level} --log logs/appium_log.txt --log-timestamp ' \
              f'--use-plugins=device-farm,appium-dashboard -p {self.server_port} -a {self.server_ip} -pa /wd/hub ' \
              f'--plugin-device-farm-platform=android --allow-insecure=adb_shell'
        try:
            # Popen (not run/check_*): the server must keep running after
            # this method returns.
            subprocess.Popen(cmd, shell=True)  # don't use with
            return True
        except OSError:
            # Fix: removed the dead `except subprocess.CalledProcessError`
            # branch — Popen never raises it (it belongs to the check_*
            # helpers); only OS-level spawn failures can occur here.
            self.logger.error("Error starting Appium server: OSError")
            return False

    def is_alive(self) -> bool:
        """
        Query the server's /wd/hub/sessions endpoint and report readiness.

        Returns:
            bool: True when the server answers HTTP 200, False otherwise.
        """
        self.logger.info("Checking Appium server status")
        try:
            response = requests.get(f"http://{self.server_ip}:{self.server_port}/wd/hub/sessions", timeout=180)
            if response.status_code == 200:
                self.logger.info("Appium server ready")
                return True
            self.logger.warning(f"Appium server responded with status code {response.status_code}")
            return False
        except requests.exceptions.RequestException as e:
            self.logger.error(f"Error checking Appium server status: {e}")
            return False

    def stop(self) -> bool:
        """
        Stop the server process.

        Windows-only (kills node.exe via taskkill); intended for desktop
        runs only.

        Returns:
            bool: True on success, False when taskkill failed.
        """
        self.logger.info("Stop Appium server")
        try:
            cmd = 'taskkill /F /IM node.exe'
            subprocess.check_output(cmd, shell=True)
            return True
        except subprocess.CalledProcessError:
            return False

    def wait_until_alive(self, timeout: int = 600, poll: int = 2) -> bool:
        """
        Poll ``is_alive`` until the server answers 200 or the timeout expires.

        Args:
            timeout: Maximum seconds to wait (default 600).
            poll: Seconds between polls (default 2).

        Returns:
            bool: True when the server became ready, False on timeout.
        """
        self.logger.info("Wait for Appium server")
        start_time = time.time()
        while time.time() < start_time + timeout:
            if self.is_alive():
                return True
            time.sleep(poll)
        return False
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_server/appium_server.py
|
appium_server.py
|
import logging
from appium.webdriver import WebElement
from appium_extended_web_element.web_element_get import WebElementGet
from appium_extended_helpers import helpers_decorators
from appium_extended_utils.utils import find_coordinates_by_vector
class WebElementAdbActions(WebElementGet):
    """
    Class for performing adb-based actions on elements.
    Inherits from WebElementGet.
    """

    def __init__(self, logger: logging.Logger, driver, element_id):
        super().__init__(logger=logger, driver=driver, element_id=element_id)

    def _adb_tap(self,
                 decorator_args: dict = None,
                 wait: bool = False) -> bool:
        """
        Tap the element via adb.

        Args:
            decorator_args (dict): Extra arguments for the waiting decorator:
                timeout_window (int): wait per attempt for a new window
                    (multiplied by the number of tries);
                tries (int): number of tap attempts (default 3).
            wait (bool): Whether to wait for the window to change after the tap.

        Returns:
            bool: True when the tap succeeded, False otherwise.
        """
        if wait:
            # Wait for the window to change after tapping.
            if not decorator_args:
                decorator_args = {"timeout_window": 5,
                                  "tries": 5}
            return self._adb_tap_to_element_and_wait(decorator_args=decorator_args)
        # No waiting for a window change.
        return self._adb_tap_to_element()

    def _adb_tap_to_element(self) -> bool:
        # Plain tap, no waiting.
        return self.__adb_tap()

    @helpers_decorators.wait_for_window_change()
    def _adb_tap_to_element_and_wait(self,
                                     decorator_args: dict = None) -> bool:
        # Tap wrapped by the window-change waiting decorator.
        return self.__adb_tap()

    def __adb_tap(self) -> bool:
        """
        Tap the element's center via adb.

        Returns:
            bool: True when the tap succeeded, False otherwise.
        """
        try:
            x, y = self._get_center()
            return self.terminal.tap(x=x, y=y)
        except Exception as e:
            # NOTE(review): failure is swallowed silently (no logging) —
            # consider logging `e` before returning False.
            return False

    def _adb_swipe(self,
                   root,
                   element: WebElement = None,
                   x: int = None,
                   y: int = None,
                   direction: int = None,
                   distance: int = None,
                   duration: int = 1) -> bool:
        """
        Swipe via adb from this element's center to a target position.

        Args:
            root: Root element on the page.
            element (WebElement): Target element to swipe to.
            x (int): Target X coordinate of the swipe.
            y (int): Target Y coordinate of the swipe.
            direction (int): Swipe direction in degrees (0-360).
            distance (int): Swipe distance in pixels.
            duration (int): Swipe duration in seconds.

        Returns:
            bool: True when the swipe was issued; False when no usable
            target (element, x/y pair, or direction/distance pair) was given.
        """
        # Validate that at least one target form was supplied.
        if element is None and (x is None or y is None) and (direction is None or distance is None):
            return False
        # Start point: center of this element (end point defaults to the same).
        x1, y1 = self._get_center()
        x2, y2 = self._get_center()
        # Compute the end point from whichever target form was provided.
        if element is not None:
            # Target element given: use its center.
            x2, y2 = self._get_center(element)
        elif x is not None and y is not None:
            # Explicit coordinates given: use them directly.
            x2, y2 = x, y
        elif direction is not None and distance is not None:
            # Direction + distance given: project the end point on screen.
            window_size = self.terminal.get_screen_resolution()
            width = window_size[0]
            height = window_size[1]
            x2, y2 = find_coordinates_by_vector(width=width, height=height,
                                                direction=direction, distance=distance,
                                                start_x=x1, start_y=y1)
        # Issue the adb swipe with the computed coordinates and duration
        # (seconds converted to milliseconds).
        self.terminal.swipe(start_x=str(x1),
                            start_y=str(y1),
                            end_x=str(x2),
                            end_y=str(y2),
                            duration=str(duration * 1000))
        return True
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_web_element/web_element_adb_actions.py
|
web_element_adb_actions.py
|
import logging
from typing import Union, Tuple, Dict, List, cast
from appium.webdriver import WebElement
from appium.webdriver.common.appiumby import AppiumBy
from appium.webdriver.common.mobileby import MobileBy
from selenium.webdriver.common.by import By
from appium_extended_web_element.web_element_click import WebElementClick
from appium_extended_web_element.web_element_dom import WebElementDOM
from appium_extended_web_element.web_element_scroll import WebElementScroll
from appium_extended_web_element.web_element_tap import WebElementTap
from appium_extended_web_element.web_element_adb_actions import WebElementAdbActions
class WebElementExtended(WebElementClick,
                         WebElementAdbActions,
                         WebElementDOM,
                         WebElementTap,
                         WebElementScroll):
    """
    Main public interface for working with an extended web element.

    Combines the click, adb, DOM-traversal, tap and scroll capabilities of the
    mixin base classes, and wraps elements returned by the internal `_...`
    helpers back into WebElementExtended instances.
    """

    def __init__(self, logger: logging.Logger, driver, element_id):
        super().__init__(logger=logger, driver=driver, element_id=element_id)

    # GET
    def get_element(self,
                    locator: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str] = None,
                    by: Union[MobileBy, AppiumBy, By, str] = None,
                    value: Union[str, Dict, None] = None,
                    timeout_elem: int = 10,
                    timeout_method: int = 600,
                    elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
                    contains: bool = True,
                    ) -> Union['WebElementExtended', None]:
        """
        Finds a single child element via the inherited `_get_element` and
        returns it wrapped as a WebElementExtended.

        Args:
            locator: Locator or element identifying the target.
            by: Locator strategy (MobileBy / AppiumBy / By / str).
            value: Locator value used together with `by`.
            timeout_elem: Seconds to wait for the element.
            timeout_method: Seconds allowed for the whole lookup method.
            elements_range: Restriction of the search space.
            contains: Whether attribute matching is "contains" rather than exact.

        Returns:
            The found element wrapped as WebElementExtended.
        """
        inner_element = self._get_element(locator=locator,
                                          by=by,
                                          value=value,
                                          timeout_elem=timeout_elem,
                                          timeout_method=timeout_method,
                                          elements_range=elements_range,
                                          contains=contains)
        return WebElementExtended(logger=self.logger, driver=inner_element.parent, element_id=inner_element.id)

    def get_attributes(self,
                       desired_attributes: Union[str, List[str]] = None,
                       ) -> Union[str, Dict[str, str], None]:
        """
        Returns the requested attribute(s) of the element via `_get_attributes`.

        Args:
            desired_attributes: A single attribute name or a list of names.

        Returns:
            The attribute value(s) as returned by `_get_attributes`.
        """
        attributes = self._get_attributes(desired_attributes=desired_attributes)
        return attributes

    # CLICK
    def click(self,
              duration: int = 0,
              decorator_args: dict = None,
              wait: bool = False,
              ) -> 'WebElementExtended':
        """
        Clicks the element.

        Args:
            duration: Press duration in seconds (default 0).
            wait: Whether to wait for a window change after the click.
            decorator_args: Parameters for the wait decorator:
                timeout_window (int): time to wait for a new window
                    (multiplied by the number of tries)
                tries (int): number of click attempts (default 3)

        Usage:
            decorator_args = {"timeout_window": 5,
                              "tries": 5}
            element._tap(duration=0, wait=True, decorator_args=decorator_args)

        Returns:
            self (asserts that the underlying click succeeded).
        """
        assert self._click(duration=duration,
                           wait=wait,
                           decorator_args=decorator_args)
        return cast('WebElementExtended', self)

    def double_click(self,
                     decorator_args: dict = None,
                     wait: bool = False,
                     ) -> 'WebElementExtended':
        """
        Double-clicks the element via `_double_click`.

        Args:
            decorator_args: Parameters for the wait decorator.
            wait: Whether to wait for a window change after the click.

        Returns:
            self (asserts that the underlying double click succeeded).
        """
        assert self._double_click(decorator_args=decorator_args,
                                  wait=wait)
        return cast('WebElementExtended', self)

    def click_and_move(self, locator: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str] = None,
                       x: int = None,
                       y: int = None,
                       direction: int = None,
                       distance: int = None,
                       ) -> 'WebElementExtended':
        """
        Presses on this element and moves to a target given either as a
        locator, absolute coordinates, or a direction+distance vector.

        Returns:
            self (asserts that the underlying click-and-move succeeded).
        """
        root = self.driver.find_element('xpath', '//*')
        root = WebElementExtended(logger=self.logger, driver=root.parent, element_id=root.id)
        assert super()._click_and_move(root=root, locator=locator, x=x, y=y, direction=direction, distance=distance)
        return cast('WebElementExtended', self)

    # ADB TAP
    def adb_tap(self,
                decorator_args: dict = None,
                wait: bool = False,
                ) -> 'WebElementExtended':
        """
        Taps the element using adb (via `_adb_tap`).

        Args:
            decorator_args: Parameters for the wait decorator.
            wait: Whether to wait for a window change after the tap.

        Returns:
            self (asserts that the underlying adb tap succeeded).
        """
        assert self._adb_tap(wait=wait,
                             decorator_args=decorator_args)
        return cast('WebElementExtended', self)

    def adb_swipe(self,
                  locator: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str] = None,
                  x: int = None,
                  y: int = None,
                  direction: int = None,
                  distance: int = None,
                  duration: int = 1,
                  contains: bool = True,
                  ) -> 'WebElementExtended':
        """
        Swipes from this element using adb. The target is given either as a
        locator (resolved against the page root), absolute coordinates, or a
        direction+distance vector.

        Returns:
            self (asserts that the underlying adb swipe succeeded).
        """
        root = self.driver.find_element('xpath', '//*')
        root = WebElementExtended(logger=self.logger, driver=root.parent, element_id=root.id)
        element = None
        if locator is not None:
            element = root.get_element(locator=locator, contains=contains)
        assert self._adb_swipe(root=root, element=element,
                               x=x, y=y,
                               direction=direction, distance=distance,
                               duration=duration)
        return cast('WebElementExtended', self)

    # TAP
    def tap(self,
            duration: int = 0,
            decorator_args: dict = None,
            wait: bool = False,
            ) -> 'WebElementExtended':
        """
        Taps at the center of the element via `_tap`.

        Args:
            duration: Tap duration.
            decorator_args: Parameters for the wait decorator.
            wait: Whether to wait for a window change after the tap.

        Returns:
            self (asserts that the underlying tap succeeded).
        """
        positions = self.get_center()
        assert self._tap(positions=[positions],
                         duration=duration,
                         decorator_args=decorator_args,
                         wait=wait)
        return cast('WebElementExtended', self)

    def double_tap(self,
                   decorator_args: dict = None,
                   wait: bool = False,
                   pause: float = 0.2,
                   ) -> 'WebElementExtended':
        """
        Double-taps at the center of the element via `_double_tap`.

        Args:
            decorator_args: Parameters for the wait decorator.
            wait: Whether to wait for a window change after the taps.
            pause: Pause between the two taps in seconds.

        Returns:
            self (asserts that the underlying double tap succeeded).
        """
        positions = self.get_center()
        assert self._double_tap(positions=positions,
                                decorator_args=decorator_args,
                                wait=wait,
                                pause=pause)
        return cast('WebElementExtended', self)

    def tap_and_move(self,
                     locator: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str] = None,
                     x: int = None,
                     y: int = None,
                     direction: int = None,
                     distance: int = None,
                     ) -> 'WebElementExtended':
        """
        Taps this element and moves to a target given either as a locator,
        absolute coordinates, or a direction+distance vector.

        Returns:
            self (asserts that the underlying tap-and-move succeeded).
        """
        root = self.driver.find_element('xpath', '//*')
        root = WebElementExtended(logger=self.logger, driver=root.parent, element_id=root.id)
        assert self._tap_and_move(root=root, locator=locator, x=x, y=y, direction=direction, distance=distance)
        return cast('WebElementExtended', self)

    # ELEMENTS
    def get_elements(self,
                     locator: Union[Tuple, List[WebElement], Dict[str, str], str] = None,
                     by: Union[MobileBy, AppiumBy, By, str] = None,
                     value: Union[str, Dict, None] = None,
                     timeout_elements: int = 10,
                     timeout_method: int = 600,
                     elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
                     contains: bool = True,
                     ) -> Union[List[WebElement], None]:
        """
        Finds multiple child elements via `_get_elements` and returns each
        wrapped as a WebElementExtended.
        """
        elements = self._get_elements(locator=locator,
                                      by=by,
                                      value=value,
                                      timeout_elements=timeout_elements,
                                      timeout_method=timeout_method,
                                      elements_range=elements_range,
                                      contains=contains)
        result = []
        for element in elements:
            result.append(WebElementExtended(logger=self.logger,
                                             driver=element.parent,
                                             element_id=element.id))
        return result

    # SCROLL
    def scroll_down(self,
                    locator: Union[Tuple, 'WebElementExtended', Dict[str, str], str] = None,
                    duration: int = None,
                    ) -> 'WebElementExtended':
        """
        Scrolls the element down via `_scroll_down`.

        Args:
            locator: Locator or element used as the scroll handle.
            duration: Scroll duration.

        Returns:
            self (asserts that the scroll succeeded).
        """
        assert self._scroll_down(locator=locator,
                                 duration=duration)
        return cast('WebElementExtended', self)

    def scroll_up(self,
                  locator: Union[Tuple, 'WebElementExtended', Dict[str, str], str] = None,
                  duration: int = None,
                  ) -> 'WebElementExtended':
        """
        Scrolls the element up via `_scroll_up`.

        Args:
            locator: Locator or element used as the scroll handle.
            duration: Scroll duration.

        Returns:
            self (asserts that the scroll succeeded).
        """
        assert self._scroll_up(locator=locator,
                               duration=duration)
        return cast('WebElementExtended', self)

    def scroll_to_bottom(self,
                         locator: Union[Tuple, 'WebElementExtended', Dict[str, str], str] = None,
                         timeout_method: int = 120,
                         ) -> 'WebElementExtended':
        """
        Scrolls the element all the way down via `_scroll_to_bottom`.

        Returns:
            self (asserts that the scroll succeeded).
        """
        assert self._scroll_to_bottom(locator=locator,
                                      timeout_method=timeout_method)
        return cast('WebElementExtended', self)

    def scroll_to_top(self,
                      locator: Union[Tuple, 'WebElementExtended', Dict[str, str], str] = None,
                      timeout_method: int = 120,
                      ) -> 'WebElementExtended':
        """
        Scrolls the element all the way up via `_scroll_to_top`.

        Returns:
            self (asserts that the scroll succeeded).
        """
        assert self._scroll_to_top(locator=locator,
                                   timeout_method=timeout_method)
        return cast('WebElementExtended', self)

    def scroll_until_find(self,
                          locator: Union[Tuple, 'WebElementExtended', Dict[str, str], str],
                          timeout_method: int = 120,
                          contains: bool = True
                          ) -> Union['WebElementExtended', None]:
        """
        Scrolls until the element described by `locator` is found.

        Returns:
            self if found, None otherwise.
        """
        if not self._scroll_until_find(locator=locator,
                                       timeout_method=timeout_method,
                                       contains=contains):
            return None
        return cast('WebElementExtended', self)

    def scroll_and_get(self,
                       locator: Union[Tuple, 'WebElementExtended', Dict[str, str], str],
                       timeout_method: int = 120,
                       ) -> Union['WebElementExtended', None]:
        """
        Scrolls until the element described by `locator` is found and returns
        it wrapped as a WebElementExtended.
        """
        element = self._scroll_and_get(locator=locator,
                                       timeout_method=timeout_method)
        return WebElementExtended(logger=self.logger,
                                  driver=element.parent,
                                  element_id=element.id)

    # DOM
    def get_parent(self) -> 'WebElementExtended':
        """
        Returns the element's parent wrapped as a WebElementExtended.
        """
        element = self._get_parent()
        return WebElementExtended(logger=self.logger, driver=element.parent, element_id=element.id)

    def get_parents(self) -> List['WebElementExtended']:
        """
        Returns all ancestors of the element, each wrapped as a WebElementExtended.
        """
        elements = self._get_parents()
        elements_ext = []
        for element in elements:
            elements_ext.append(WebElementExtended(logger=self.logger, driver=element.parent, element_id=element.id))
        return elements_ext

    def get_sibling(self,
                    attributes: Dict[str, str],
                    contains: bool = True,
                    ) -> 'WebElementExtended':
        """
        Returns a sibling matching `attributes`, wrapped as a WebElementExtended.
        """
        element = self._get_sibling(attributes=attributes, contains=contains)
        return WebElementExtended(logger=self.logger, driver=element.parent, element_id=element.id)

    def get_siblings(self) -> List['WebElementExtended']:
        """
        Returns all siblings of the element, each wrapped as a WebElementExtended.
        """
        elements = self._get_siblings()
        elements_ext = []
        for element in elements:
            elements_ext.append(WebElementExtended(logger=self.logger, driver=element.parent, element_id=element.id))
        return elements_ext

    def get_cousin(self,
                   ancestor: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str],
                   cousin: Dict[str, str],
                   contains: bool = True,
                   ) -> 'WebElementExtended':
        """
        Resolves `ancestor` against the page root, then returns the matching
        cousin element wrapped as a WebElementExtended.
        """
        root = self.driver.find_element('xpath', '//*')
        root = WebElementExtended(logger=self.logger, driver=root.parent, element_id=root.id)
        ancestor = root.get_element(ancestor)
        ancestor = WebElement(ancestor.parent, ancestor.id)
        element = self._get_cousin(ancestor=ancestor, cousin=cousin, contains=contains)
        return WebElementExtended(logger=self.logger, driver=element.parent, element_id=element.id)

    def get_cousins(self,
                    ancestor: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str],
                    cousin: Dict[str, str],
                    contains: bool = True,
                    ) -> List['WebElementExtended']:
        """
        Resolves `ancestor` against the page root, then returns all matching
        cousin elements, each wrapped as a WebElementExtended.
        """
        root = self.driver.find_element('xpath', '//*')
        root = WebElementExtended(logger=self.logger, driver=root.parent, element_id=root.id)
        ancestor = root.get_element(ancestor)
        ancestor = WebElement(ancestor.parent, ancestor.id)
        elements = self._get_cousins(ancestor=ancestor, cousin=cousin, contains=contains)
        elements_ext = []
        for element in elements:
            elements_ext.append(WebElementExtended(logger=self.logger, driver=element.parent, element_id=element.id))
        return elements_ext

    def is_contains(self,
                    locator: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str],
                    contains: bool = True,
                    ) -> bool:
        """
        Returns True if a child element matching `locator` can be found.
        """
        child_element = self.get_element(locator=locator, contains=contains)
        if child_element is not None:
            return True
        return False

    # ACTIONS
    def zoom(self, hold: bool) -> 'WebElementExtended':
        """
        Zoom gesture on the element. Not implemented yet.
        """
        raise NotImplementedError  # TODO implement

    def unzoom(self, hold: bool) -> 'WebElementExtended':
        """
        Reverse-zoom gesture on the element. Not implemented yet.
        """
        raise NotImplementedError  # TODO implement

    def get_center(self) -> Union[Tuple[int, int], None]:
        """
        Computes the center coordinates of this element.

        Returns:
            tuple: Center coordinates as (x, y), or None if an error occurred.
        """
        return self._get_center()

    def get_coordinates(self) -> Union[Tuple[int, int, int, int], None]:
        """
        Returns the element's bounding coordinates via `_get_coordinates`.
        """
        return self._get_coordinates()
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_web_element/web_element_extended.py
|
web_element_extended.py
|
import logging
from typing import Union, Tuple, Dict, List, Optional
from appium.webdriver import WebElement
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.actions import interaction
from selenium.webdriver.common.actions.action_builder import ActionBuilder
from selenium.webdriver.common.actions.pointer_input import PointerInput
from appium_extended_web_element.web_element_get import WebElementGet
from appium_extended_helpers.helpers_decorators import wait_for_window_change
from appium_extended_utils.utils import find_coordinates_by_vector
class WebElementTap(WebElementGet):
    """
    Tap actions (tap, double tap, tap-and-move) performed on web page elements.

    Inherits from WebElementGet.
    """

    def __init__(self, logger: logging.Logger, driver, element_id):
        super().__init__(logger=logger, driver=driver, element_id=element_id)

    def _tap(self,
             positions: List[Tuple[int, int]],
             duration: int = 0,
             decorator_args: dict = None,
             wait: bool = False) -> bool:
        """
        Taps at the given coordinates.

        Args:
            positions (List[Tuple[int, int]]): List of (x, y) coordinates to tap.
            duration (int): Tap duration in milliseconds.
            decorator_args (dict): Extra arguments for the wait decorator, e.g.:
                decorator_args = {"timeout_window": 5,
                                  "tries": 5}, where
                timeout_window (int): time to wait for the window to change
                tries (int): number of attempts while waiting for the change
            wait (bool): Whether to wait for a window change after the tap.

        Returns:
            bool: True if the tap succeeded, False otherwise.
        """
        if wait:
            # Wait for a window change after the tap
            if not decorator_args:
                # Default decorator arguments
                decorator_args = {"timeout_window": 5,
                                  "tries": 5}
            return self._tap_to_element_and_wait(positions=positions, duration=duration, decorator_args=decorator_args)
        else:
            # Tap without waiting for a result
            return self._tap_to_element(positions=positions, duration=duration)

    @wait_for_window_change()
    def _tap_to_element_and_wait(self,
                                 positions: List[Tuple[int, int]],
                                 duration: int = 0,
                                 decorator_args: dict = None, ):
        # Decorated variant: wait_for_window_change handles the waiting,
        # consuming decorator_args.
        return self.__tap(positions=positions, duration=duration)

    def _tap_to_element(self,
                        positions: List[Tuple[int, int]],
                        duration: int = 0, ):
        # Plain variant: tap without waiting for a window change.
        return self.__tap(positions=positions, duration=duration)

    def __tap(self, positions: List[Tuple[int, int]], duration: Optional[int] = None):
        """
        Performs the actual tap at the given coordinates via the driver.

        Args:
            positions (List[Tuple[int, int]]): List of (x, y) coordinates to tap.
            duration (Optional[int]): Tap duration in milliseconds.

        Returns:
            bool: True if the tap succeeded, False otherwise.
        """
        try:
            self.driver.tap(positions=positions, duration=duration)
            return True
        except Exception as e:
            self.logger.error("some exception with __tap(): {}".format(e))
            return False

    def _double_tap(self,
                    positions: Tuple[int, int],
                    decorator_args: dict = None,
                    wait: bool = False,
                    pause: float = 0.2) -> bool:
        """
        Performs a double tap at the given coordinates.

        Args:
            positions (Tuple[int, int]): (x, y) coordinates for the double tap.
            decorator_args (dict): Extra arguments for the wait decorator, e.g.:
                decorator_args = {"timeout_window": 5,
                                  "tries": 5}, where
                timeout_window (int): time to wait for the window to change
                tries (int): number of attempts while waiting for the change
            wait (bool): Whether to wait for a window change after the double tap.
            pause (float): Pause between the two taps in seconds.

        Returns:
            bool: True if the double tap succeeded, False otherwise.
        """
        # Default decorator arguments.
        # NOTE(review): this unconditionally overwrites any decorator_args the
        # caller passed in — the parameter is effectively ignored (compare
        # with _tap, which only sets defaults when decorator_args is falsy).
        decorator_args = {"timeout_window": 5,
                          "tries": 5}
        if wait:
            # Wait for a window change after the double tap
            return self._double_tap_to_element_and_wait(positions=positions, decorator_args=decorator_args, pause=pause)
        else:
            # Double tap without waiting for a result
            return self._double_tap_to_element(positions=positions, pause=pause)

    @wait_for_window_change()
    def _double_tap_to_element_and_wait(self, positions: Tuple[int, int], decorator_args: dict = None,
                                        pause: float = 0.2) -> bool:
        # Decorated variant: wait_for_window_change handles the waiting.
        return self.__double_tap(positions=positions, pause=pause)

    def _double_tap_to_element(self, positions: Tuple[int, int], pause: float = 0.2) -> bool:
        # Plain variant: double tap without waiting for a window change.
        return self.__double_tap(positions=positions, pause=pause)

    def __double_tap(self, positions: Tuple[int, int], pause: float = 0.2) -> bool:
        """
        Performs the actual double tap at the given coordinates using W3C
        touch pointer actions.

        Args:
            positions (Tuple[int, int]): (x, y) coordinates for the double tap.
            pause (float): Pause between the two taps in seconds.

        Returns:
            bool: True if the double tap succeeded, False otherwise.
        """
        actions = ActionChains(self.driver)
        actions.w3c_actions = ActionBuilder(self.driver, mouse=PointerInput(interaction.POINTER_TOUCH, "touch"))
        x = positions[0]
        y = positions[1]
        # First tap
        actions.w3c_actions.pointer_action.move_to_location(x, y)
        actions.w3c_actions.pointer_action.pointer_down()
        actions.w3c_actions.pointer_action.pause(0.1)
        actions.w3c_actions.pointer_action.pointer_up()
        actions.w3c_actions.pointer_action.pause(pause)
        # Second tap
        actions.w3c_actions.pointer_action.pointer_down()
        actions.w3c_actions.pointer_action.pause(0.1)
        actions.w3c_actions.pointer_action.release()
        try:
            actions.perform()
            return True
        except Exception as e:
            self.logger.error("some exception with __double_tap(): {}".format(e))
            return False

    def _tap_and_move(self,
                      root=None,
                      locator: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str] = None,
                      x: int = None,
                      y: int = None,
                      direction: int = None,
                      distance: int = None,
                      ) -> bool:
        """
        Performs a tap-and-move gesture from this element to a target.

        The target may be a WebElement (via `locator` + `root`), absolute
        coordinates (x, y), or a direction + distance vector, in which case
        the target position is computed from that vector.

        Args:
            root (WebElementExtended): Root element against which `locator`
                is resolved.
            locator (Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str]):
                Locator of the element to move to.
            x (int): Target X coordinate.
            y (int): Target Y coordinate.
            direction (int): Direction in degrees (0 - up, 90 - right,
                180 - down, 270 - left).
            distance (int): Distance in pixels.

        Returns:
            bool: True if the gesture was performed, False otherwise.
        """
        # The gesture starts at the center of this element.
        x1, y1 = self._get_center()
        actions = ActionChains(self.driver)
        actions.w3c_actions = ActionBuilder(self.driver, mouse=PointerInput(interaction.POINTER_TOUCH, "touch"))
        actions.w3c_actions.pointer_action.move_to_location(x1, y1)
        actions.w3c_actions.pointer_action.pointer_down()
        # Decide which kind of target was supplied.
        if (x is None and y is None) and locator is None and (direction is None and distance is None):
            # No target arguments at all
            self.logger.error(f"_tap_and_move(): Нет аргументов")
            return False
        elif x is not None and y is not None:
            # Absolute target coordinates were supplied
            actions.w3c_actions.pointer_action.move_to_location(x, y)
            actions.w3c_actions.pointer_action.release()
            actions.perform()
            return True
        elif locator is not None and root is not None:
            # A target locator and a root element were supplied
            target_element = root.get_element(locator)
            x, y = target_element._get_center()
            actions.w3c_actions.pointer_action.move_to_location(x, y)
            actions.w3c_actions.pointer_action.release()
            actions.perform()
            return True
        elif direction is not None and distance is not None:
            # Direction and distance supplied: compute the target position.
            # NOTE(review): `self.terminal` is not defined in this class or its
            # visible imports — presumably provided by a base class; confirm.
            window_size = self.terminal.get_screen_resolution()
            width = window_size[0]
            height = window_size[1]
            x2, y2 = find_coordinates_by_vector(width=width, height=height,
                                                direction=direction, distance=distance,
                                                start_x=x1, start_y=y1)
            actions.w3c_actions.pointer_action.move_to_location(x2, y2)
            actions.w3c_actions.pointer_action.release()
            actions.perform()
            return True
        return False
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_web_element/web_element_tap.py
|
web_element_tap.py
|
import logging
import time
from typing import Union, Tuple, Dict, Optional
from appium.webdriver import WebElement
from selenium.common.exceptions import StaleElementReferenceException, NoSuchElementException, TimeoutException
from appium_extended_web_element.web_element_get import WebElementGet
class WebElementScroll(WebElementGet):
    """
    Scrolling actions for a scrollable element.

    Inherits from WebElementGet.
    """

    def __init__(self, logger: logging.Logger, driver, element_id):
        super().__init__(logger=logger, driver=driver, element_id=element_id)

    def _scroll_down(self,
                     locator: Union[Tuple, 'WebElementExtended', Dict[str, str], str] = None,
                     duration: int = None) -> bool:
        """
        Scrolls the element down, dragging from its bottom child to its top child.

        Args:
            locator (Union[Tuple, WebElement, Dict[str, str], str], optional):
                Locator or element used as the scroll handle.
            duration (int, optional): Scroll duration in milliseconds
                (default: None).

        Returns:
            bool: True if the scroll succeeded.
        """
        try:
            recycler = self
            # Make sure the element is actually scrollable
            if recycler.get_attribute('scrollable') != 'true':
                self.logger.error("Элемент не крутиться")
                return False
            # If no handle locator is given, use the first child's class
            if not locator:
                locator = {'class': self._get_first_child_class()}
            # Get the top and bottom matching children of the parent element
            top_child = self._get_top_child_from_parent(locator=locator)
            bottom_child = self._get_bottom_child_from_parent(locator=locator)
            # Drag from the bottom child to the top child (scrolls content down)
            self.driver.scroll(origin_el=bottom_child, destination_el=top_child, duration=duration)
            return True
        except (NoSuchElementException, StaleElementReferenceException, TimeoutException) as e:
            self.logger.error("_scroll_down(): Ошибка. {}".format(e))
            return False

    def _scroll_up(self,
                   locator: Union[Tuple, 'WebElementExtended', Dict[str, str], str] = None,
                   duration: int = None) -> bool:
        """
        Scrolls the element up, dragging from its top child to its bottom child.

        Args:
            locator (Union[Tuple, WebElement, Dict[str, str], str], optional):
                Locator or element used as the scroll handle.
            duration (int, optional): Scroll duration in milliseconds
                (default: None).

        Returns:
            bool: True if the scroll succeeded.
        """
        try:
            recycler = self
            # Make sure the element is actually scrollable
            if recycler.get_attribute('scrollable') != 'true':
                self.logger.error("Элемент не крутиться")
                return False
            # If no handle locator is given, use the first child's class
            if not locator:
                locator = {'class': self._get_first_child_class()}
            # Get the top and bottom matching children of the parent element
            top_child = self._get_top_child_from_parent(locator=locator)
            bottom_child = self._get_bottom_child_from_parent(locator=locator)
            # Drag from the top child to the bottom child (scrolls content up)
            self.driver.scroll(origin_el=top_child, destination_el=bottom_child, duration=duration)
            return True
        except (NoSuchElementException, StaleElementReferenceException, TimeoutException) as e:
            self.logger.error("_scroll_up(): Ошибка. {}".format(e))
            return False

    def _scroll_to_bottom(self,
                          locator: Union[Tuple, WebElement, Dict[str, str], str] = None,
                          timeout_method: int = 120) -> bool:
        """
        Scrolls the element down until it cannot scroll any further.

        Args:
            locator (Union[Tuple, WebElement, Dict[str, str], str], optional):
                Locator or element used as the scroll handle.
            timeout_method (int, optional): Overall time budget in seconds
                (default: 120).

        Returns:
            bool: True if the bottom was reached within the time budget.
        """
        recycler = self
        # Make sure the element is actually scrollable
        if recycler.get_attribute('scrollable') != 'true':
            self.logger.error("Элемент не крутиться")
            return False
        # If no handle locator is given, use the first child's class
        if not locator:
            locator = {'class': self._get_first_child_class()}
        last_child = None
        start_time = time.time()
        # Keep scrolling down until the first matching child stops changing
        while time.time() - start_time < timeout_method:
            child = self._get_element(locator=locator)
            if child == last_child:
                return True
            last_child = child
            self._scroll_down(locator=locator)
        self.logger.error("_scroll_to_bottom(): Неизвестная ошибка")
        return False

    def _scroll_to_top(self,
                       locator: Union[Tuple, WebElement, Dict[str, str], str],
                       timeout_method: int = 120) -> bool:
        """
        Scrolls the element up until it cannot scroll any further.

        Args:
            locator (Union[Tuple, WebElement, Dict[str, str], str]): Locator or
                element used as the scroll handle.
            timeout_method (int): Overall time budget in seconds (default: 120).

        Returns:
            bool: True if the top was reached within the time budget.
        """
        recycler = self
        # Make sure the element is actually scrollable
        if recycler.get_attribute('scrollable') != 'true':
            self.logger.error("Элемент не крутиться")
            return False
        # If no handle locator is given, use the first child's class
        if not locator:
            locator = {'class': self._get_first_child_class()}
        last_child = None
        start_time = time.time()
        # Keep scrolling up until the first matching child stops changing
        while time.time() - start_time < timeout_method:
            child = self._get_element(locator=locator)
            if child == last_child:
                return True
            last_child = child
            self._scroll_up(locator=locator)
        self.logger.error("_scroll_to_top(): Неизвестная ошибка")
        return False

    def _scroll_until_find(self,
                           locator: Union[Tuple, WebElement, Dict[str, str], str],
                           timeout_method: int = 120,
                           contains: bool = True) -> bool:
        """
        Scrolls the element down, then up, searching for the given locator.

        Args:
            locator (Union[Tuple, WebElement, Dict[str, str], str]): Locator or
                element to search for; a string is treated as an image to match
                on screen.
            timeout_method (int): Time budget per direction in seconds
                (default: 120 down and 120 up).

        Returns:
            bool: True if the element was found, False otherwise.
        """
        recycler = self
        # Make sure the element is actually scrollable
        if recycler.get_attribute('scrollable') != 'true':
            self.logger.error("Элемент не крутиться")
            return False
        start_time = time.time()
        last_element_image = None
        # Scroll down while looking for the element
        while time.time() - start_time < timeout_method:
            try:
                if isinstance(locator, str):
                    if self.helper.is_image_on_the_screen(image=locator):
                        return True
                element = self._get_element(locator=locator, timeout_elem=1, contains=contains)
                if element is not None:
                    return True
            except NoSuchElementException:
                # NOTE(review): `continue` skips the screenshot comparison AND
                # the scroll step below — if _get_element raises here, the loop
                # spins without scrolling until the timeout. Confirm whether
                # `pass` (as in the upward loop) was intended.
                continue
            current_element_image = self.screenshot_as_base64
            if current_element_image == last_element_image:
                break
            last_element_image = self.screenshot_as_base64
            recycler._scroll_down()
        # Scroll up while looking for the element
        while time.time() - start_time < timeout_method:
            try:
                if isinstance(locator, str):
                    if self.helper.is_image_on_the_screen(image=locator):
                        return True
                element = self._get_element(locator=locator, timeout_elem=1, contains=contains)
                if element is not None:
                    return True
            except NoSuchElementException:
                pass
            current_element_image = self.screenshot_as_base64
            if current_element_image == last_element_image:
                break
            last_element_image = self.screenshot_as_base64
            recycler._scroll_up()
        self.logger.error("_scroll_until_find(): Элемент не найден")
        return False

    def _scroll_and_get(self,
                        locator: Union[Tuple, WebElement, Dict[str, str], str],
                        timeout_method: int = 120) -> Optional[WebElement]:
        """
        Scrolls the element down, then up, searching for the given locator,
        and returns the found element.

        Args:
            locator (Union[Tuple, WebElement, Dict[str, str], str]): Locator or
                element to search for; a string is treated as an image to match
                on screen.
            timeout_method (int): Time budget per direction in seconds
                (default: 120 down and 120 up).

        Returns:
            Optional[WebElement]: The found element, or None if not found.
        """
        recycler = self
        # Make sure the element is actually scrollable
        if recycler.get_attribute('scrollable') != 'true':
            self.logger.error("Элемент не крутится")
            return None
        start_time = time.time()
        last_element_image = None
        # Scroll down while looking for the element
        while time.time() - start_time < timeout_method:
            try:
                if isinstance(locator, str):
                    if self.helper.is_image_on_the_screen(image=locator):
                        return self._get_element(locator=locator, timeout_elem=1)
                element = self._get_element(locator=locator, timeout_elem=1)
                if element is not None:
                    return element
            except NoSuchElementException:
                # NOTE(review): same as in _scroll_until_find — `continue`
                # skips the scroll step; confirm whether `pass` was intended.
                continue
            current_element_image = self.screenshot_as_base64
            if current_element_image == last_element_image:
                break
            last_element_image = self.screenshot_as_base64
            recycler._scroll_down()
        # Scroll up while looking for the element
        while time.time() - start_time < timeout_method:
            try:
                if isinstance(locator, str):
                    if self.helper.is_image_on_the_screen(image=locator):
                        return self._get_element(locator=locator, timeout_elem=1)
                element = self._get_element(locator=locator, timeout_elem=1)
                if element is not None:
                    return element
            except NoSuchElementException:
                pass
            current_element_image = self.screenshot_as_base64
            if current_element_image == last_element_image:
                break
            last_element_image = self.screenshot_as_base64
            recycler._scroll_up()
        self.logger.error("_scroll_and_get(): Элемент не найден")
        return None
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_web_element/web_element_scroll.py
|
web_element_scroll.py
|
import logging
from typing import Union, Tuple, Dict
from appium.webdriver import WebElement
from selenium.webdriver.common.action_chains import ActionChains
from selenium.common.exceptions import ElementNotInteractableException, StaleElementReferenceException, \
InvalidElementStateException
from appium_extended_web_element.web_element_get import WebElementGet
from appium_extended_helpers.helpers_decorators import wait_for_window_change
from appium_extended_utils.utils import find_coordinates_by_vector
class WebElementClick(WebElementGet):
"""
Класс для выполнения действий клика на элементе, двойного клика и клика с зажатием и перемещением курсора.
Наследуется от класса WebElementGet.
"""
def __init__(self, logger: logging.Logger, driver, element_id):
super().__init__(logger=logger, driver=driver, element_id=element_id)
def _click(self,
duration: int = 0,
decorator_args: dict = None,
wait: bool = False) -> bool:
"""
Нажимает на элемент.
Аргументы:
duration (int): Время в секундах для продолжительности нажатия (по умолчанию 0).
decorator_args (dict): Параметры для декоратора.
timeout_window (int): Время ожидания нового окна (умножается на количество попыток).
tries (int): Количество попыток нажатия (по умолчанию 3).
wait (bool): Флаг, указывающий, нужно ли ожидать изменения окна.
Использование:
decorator_args = {"timeout_window": 5,
"tries": 5}
element._click(duration=0, wait=True, decorator_args=decorator_args)
Возвращает:
bool: True, если нажатие выполнено успешно; False в противном случае.
"""
if wait:
# Если нужно ожидать изменения окна после нажатия
if not decorator_args:
# Декоратор по умолчанию
decorator_args = {"timeout_window": 5,
"tries": 5}
return self._click_to_element_and_wait(duration=duration, decorator_args=decorator_args)
else:
# Если не нужно ожидать результата после нажатия
return self._click_to_element(duration=duration)
def _click_to_element(self,
duration: int = 0) -> bool:
return self.__click(duration=duration)
@wait_for_window_change()
def _click_to_element_and_wait(self,
duration: int = 0,
decorator_args: dict = None) -> bool:
return self.__click(duration=duration)
def __click(self,
duration: int = 0) -> bool:
"""
Выполняет клик на элементе.
Аргументы:
duration (int): Длительность удержания клика в секундах.
Возвращает:
bool: True, если клик выполнен успешно; False в противном случае.
"""
try:
action = ActionChains(self.driver)
element = self
if duration > 0:
# Если указана длительность клика, выполняется клик с удержанием на заданную длительность
action.click_and_hold(element).pause(duration / 1000).release()
action.perform()
else:
# Если не указана длительность клика, выполняется обычный клик
action.click(element).perform()
except (ElementNotInteractableException, StaleElementReferenceException, InvalidElementStateException) as e:
self.logger.error(f"Не удалось кликнуть по элементу")
self.logger.error("{}".format(e))
return False
return True
def _double_click(self,
                  decorator_args: dict = None,
                  wait: bool = False) -> bool:
    """
    Perform a double click on the element.

    Args:
        decorator_args (dict): options for the window-change decorator, e.g.
            {"timeout_window": 5, "tries": 5}, where
            timeout_window (int) is the window-change wait time and
            tries (int) is the number of attempts.
        wait (bool): if True, wait for a window change after the double click.

    Returns:
        bool: True if the double click succeeded, False otherwise.
    """
    if not wait:
        # No waiting required: plain double click.
        return self._double_click_to_element()
    # Bug fix: the original unconditionally overwrote caller-supplied
    # decorator_args with the defaults; now defaults apply only when omitted.
    if not decorator_args:
        decorator_args = {"timeout_window": 5,
                          "tries": 5}
    return self._double_click_to_element_and_wait(decorator_args=decorator_args)
def _double_click_to_element(self) -> bool:
    """Delegate to the private double-click implementation."""
    return self.__double_click()
@wait_for_window_change()
def _double_click_to_element_and_wait(self, decorator_args: dict = None) -> bool:
    """
    Double-click the element and wait for a window change.

    The waiting is handled by the ``wait_for_window_change`` decorator, which
    consumes ``decorator_args`` — confirm the expected keys against it.
    """
    return self.__double_click()
def __double_click(self):
    """
    Perform a double click on the element via two chained single clicks.

    Returns:
        bool: True if the double click was performed, False otherwise.
    """
    try:
        action = ActionChains(self.driver)
        action.click(self).click(self).perform()
        return True
    except InvalidElementStateException:
        # NOTE(review): this exception is deliberately treated as success —
        # confirm this is intended (e.g. element state changed by the first
        # click) rather than a swallowed failure.
        return True
    except (ElementNotInteractableException, StaleElementReferenceException) as e:
        self.logger.error(f"Не удалось тапнуть по элементу")
        self.logger.error("{}".format(e))
        return False
def _click_and_move(self,
                    root=None,
                    locator: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str] = None,
                    x: int = None,
                    y: int = None,
                    direction: int = None,
                    distance: int = None,
                    ) -> bool:
    """
    Press the left button on this element, move the cursor to a target, release.

    The target may be given as:
      - absolute coordinates ``(x, y)``;
      - a ``locator`` resolved inside ``root``;
      - a ``direction`` in degrees (0/360 = up, 90 = right, 180 = down,
        270 = left) together with a ``distance`` in pixels.

    Args:
        root: element used as the search root when ``locator`` is given.
        locator: locator of the target element.
        x: absolute X coordinate of the target.
        y: absolute Y coordinate of the target.
        direction: movement direction in degrees.
        distance: movement distance in pixels.

    Returns:
        bool: True if the gesture was performed, False otherwise (including
        when no target arguments were supplied).
    """
    element = self
    action = ActionChains(self.driver)
    action.click_and_hold(element)
    # Centre of this element: the starting point of the gesture.
    x1, y1 = self._get_center()
    if (x is None and y is None) and locator is None and (direction is None and distance is None):
        # No target supplied at all.
        self.logger.error(f"_click_and_move(): Нет аргументов")
        return False
    elif x is not None and y is not None:
        # Absolute target coordinates: move by the offset from the centre.
        action.move_by_offset(x - x1, y - y1)
        action.release().perform()
        return True
    elif locator is not None and root is not None:
        # Target element resolved via locator inside root.
        target_element = root.get_element(locator)
        action.move_to_element(target_element)
        # Bug fix: the chain was previously never released nor performed in
        # this branch, so the drag was silently dropped.
        action.release().perform()
        return True
    elif direction is not None and distance is not None:
        # Target computed from a direction vector within the screen bounds.
        window_size = self.terminal.get_screen_resolution()
        width = window_size[0]
        height = window_size[1]
        x2, y2 = find_coordinates_by_vector(width=width, height=height,
                                            direction=direction, distance=distance,
                                            start_x=x1, start_y=y1)
        action.move_by_offset(x2 - x1, y2 - y1)
        action.release().perform()
        return True
    return False
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_web_element/web_element_click.py
|
web_element_click.py
|
import logging
import time
from typing import Union, Dict, List, Tuple
import xml.etree.ElementTree as ET
from appium.webdriver import WebElement
from selenium.common.exceptions import WebDriverException
from appium.webdriver.common.mobileby import MobileBy
from appium.webdriver.common.appiumby import AppiumBy
from selenium.webdriver.common.by import By
from appium_extended_helpers.appium_helpers import AppiumHelpers
class WebElementGet(WebElement):
    """
    Extension of the Appium WebElement.

    Provides retrieval of entities from the element: inner elements,
    attributes, XPath, coordinates and children.
    """

    def __init__(self, logger: logging.Logger, driver, element_id):
        super().__init__(parent=driver, id_=element_id)
        self.driver = driver
        self.logger = logger
        self.helper = AppiumHelpers(driver=self.driver, logger=self.logger)
        self.terminal = self.helper.terminal

    def _get_element(self,
                     locator: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str] = None,
                     by: Union[MobileBy, AppiumBy, By, str] = None,
                     value: Union[str, Dict, None] = None,
                     timeout_elem: int = 10,
                     timeout_method: int = 600,
                     elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
                     contains: bool = True
                     ) -> Union[WebElement, None]:
        """
        Retrieve a single element from within the current element.

        Requires either ``locator`` or the pair ``by``/``value``.

        Usage:
            inner_element = element.get_element(locator=("id", "foo"))
            inner_element = element.get_element(locator={'text': 'foo'})
            inner_element = element.get_element(locator='/path/to/file/pay_agent.png')
            inner_element = element.get_element(by="id", value="ru.sigma.app.debug:id/backButton")

        Args:
            locator: tuple ('strategy', 'value'), a WebElement, a dict of
                attribute -> value pairs, or a path to an image file.
            by: locator strategy, always used together with ``value``.
            value: locator value, or an argument dict for AppiumBy.XPATH.
            timeout_elem: timeout for the element itself, seconds.
            timeout_method: overall timeout of the search loop, seconds.
            elements_range: restricts image-based search; defaults to all
                descendants of the current element.
            contains: match attribute values by substring when True.

        Returns:
            The found WebElement, or None.
        """
        # Validate arguments: need a locator or a complete by/value pair.
        if (not locator) and (not by or not value):
            self.logger.error(f"Некорректные аргументы!\n"
                              f"{locator=}\n"
                              f"{by=}\n"
                              f"{value=}\n"
                              f"{timeout_elem=}\n")
            return None
        if not locator and (by and value):
            locator = (by, value)
        if locator is None:
            return None
        # Image search is confined to the current element's subtree by default.
        if elements_range is None:
            elements_range = self.find_elements("xpath", ".//*")
        # Locator preparation strategies, keyed by the locator's Python type.
        locator_handler = {
            # builds a tuple locator from a dict of element attributes
            dict: self.helper.handle_dict_locator,
            # searches by image fragment
            str: self.helper.handle_string_locator,
        }
        start_time = time.time()
        while time.time() - start_time < timeout_method:
            if isinstance(locator, WebElement):
                return locator
            locator_type = type(locator)
            # A tuple locator is resolved directly.
            if isinstance(locator, tuple):
                try:
                    return self.find_element(*locator)
                except WebDriverException:
                    return None
            handler = locator_handler.get(locator_type)
            # Bug fix: the original checked ``locator is None`` here instead of
            # ``handler``, so an unsupported locator type crashed with a
            # TypeError on the call below.
            if handler is None:
                return None
            locator = handler(locator=locator,
                              timeout=int(timeout_elem),
                              elements_range=elements_range,
                              contains=contains)
            # Image-based search may hand back a ready WebElement.
            if isinstance(locator, WebElement):
                return locator
        self.logger.error(f"Что-то пошло не так\n"
                          f"{locator=}\n"
                          f"{by=}\n"
                          f"{value=}\n"
                          f"{timeout_elem=}\n"
                          f"{timeout_method=}\n")
        return None

    def _get_elements(self,
                      locator: Union[Tuple, List[WebElement], Dict[str, str], str] = None,
                      by: Union[MobileBy, AppiumBy, By, str] = None,
                      value: Union[str, Dict, None] = None,
                      timeout_elements: int = 10,
                      timeout_method: int = 600,
                      elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
                      contains: bool = True) -> Union[List[WebElement], None]:
        """
        Search for elements within the current DOM structure.

        Requires either ``locator`` or the pair ``by``/``value``.

        Args:
            locator: tuple, list of WebElements, dict of attribute -> value
                pairs, or a path to an image file.
            by: locator strategy, always used together with ``value``.
            value: locator value, or an argument dict for AppiumBy.XPATH.
            timeout_elements: timeout for the elements themselves, seconds.
            timeout_method: overall timeout of the search loop, seconds.
            elements_range: restricts image-based search.
            contains: match attribute values by substring when True.

        Returns:
            A list of WebElements, or None on failure.
        """
        if not locator and (not by or not value):
            self.logger.error(f"Некорректные аргументы!\n"
                              f"{locator=}\n"
                              f"{by=}\n"
                              f"{value=}\n"
                              f"{timeout_elements=}\n"
                              f"{timeout_method=}\n"
                              f"{contains=}")
            return None
        if not locator and (by and value):
            locator = (by, value)
        if locator is None:
            return None
        # Locator preparation strategies, keyed by the locator's Python type.
        locator_handler = {
            # a list of elements is returned as-is
            list: self.helper.handle_webelement_locator_elements,
            # builds a tuple locator from a dict of element attributes
            dict: self.helper.handle_dict_locator_elements,
            # searches by image fragment, returns a list of elements
            str: self.helper.handle_string_locator_elements,
        }
        start_time = time.time()
        while not isinstance(locator, list) and time.time() - start_time < timeout_method:
            # A tuple locator is resolved directly.
            if isinstance(locator, tuple):
                try:
                    return self.find_elements(*locator)
                except WebDriverException:
                    return None
            handler = locator_handler.get(type(locator))
            # Guard added: an unsupported locator type previously raised a
            # TypeError when the lookup came back as None.
            if handler is None:
                return None
            locator = handler(locator=locator,
                              timeout=int(timeout_elements),
                              elements_range=elements_range,
                              contains=contains)
        # Image-based search may hand back a ready list.
        if isinstance(locator, list):
            return locator
        self.logger.error(f"Что-то пошло не так\n"
                          f"{locator=}\n"
                          f"{by=}\n"
                          f"{value=}\n"
                          f"{timeout_elements=}\n"
                          f"{timeout_method=}\n"
                          f"{contains=}")
        return None

    def _get_attributes(self,
                        desired_attributes: List[str] = None) -> Dict[str, str]:
        """
        Collect the element's attributes from the page source.

        If any requested attribute is missing from the element, the full
        attribute dict is returned instead (original behavior preserved).

        Usage:
            element._get_attributes(['text', 'bounds', 'class'])
            element._get_attributes()

        Args:
            desired_attributes: names of the attributes to return; when omitted,
                all attributes are returned.

        Returns:
            Dict of attribute name -> value.
        """
        result = {}
        root = ET.fromstring(self.parent.page_source)
        # Locate this element in the XML dump by matching bounds + class.
        # Hoisted out of the loop: these are WebDriver round-trips.
        own_bounds = self.get_attribute('bounds')
        own_class = self.get_attribute('class')
        found_element = None
        for element in root.iter():
            if 'bounds' in element.attrib and 'class' in element.attrib:
                if own_bounds == element.attrib['bounds'] and own_class == element.attrib['class']:
                    found_element = element
                    break
        if found_element is not None:
            result.update(found_element.attrib)
        # Fix: the original used an 'all' string sentinel that was then
        # iterated character-by-character and only worked by accident; an
        # empty/omitted filter now explicitly returns all attributes.
        if not desired_attributes:
            return result
        new_result = {}
        for attribute in desired_attributes:
            if attribute not in result:
                # Fall back to the full dict when a requested key is absent.
                return result
            new_result[attribute] = result[attribute]
        return new_result

    def _get_xpath(self) -> Union[str, None]:
        """
        Build an XPath expression for the element from its attributes.

        Returns:
            str: the XPath of the element, or None if it could not be built.
        """
        try:
            xpath = "//"
            attrs = self._get_attributes()
            element_type = attrs.get('class')
            # Attributes too volatile or verbose to key an XPath on.
            except_attrs = ['hint',
                            'content-desc',
                            'selection-start',
                            'selection-end',
                            'extras',
                            ]
            # Root the expression on the element class when known.
            if element_type:
                xpath += element_type
            else:
                xpath += "*"
            # Append one predicate per remaining attribute.
            for key, value in attrs.items():
                if key in except_attrs:
                    continue
                if value is None:
                    xpath += "[@{}]".format(key)
                else:
                    xpath += "[@{}='{}']".format(key, value)
            return xpath
        except (AttributeError, KeyError) as e:
            self.logger.error("Ошибка при формировании XPath: {}".format(str(e)))
        except Exception as e:
            self.logger.error("Неизвестная ошибка при формировании XPath: {}".format(str(e)))
        return None

    def _get_center(self, element: WebElement = None) -> Union[Tuple[int, int], None]:
        """
        Return the centre coordinates of the element.

        Note: the ``element`` parameter is accepted for backward compatibility
        but — exactly as in the original implementation, where both branches
        were identical — the coordinates are always taken from ``self``.

        Returns:
            tuple: (x, y) of the centre, or None on failure.
        """
        try:
            left, top, right, bottom = self._get_coordinates()
            x = (left + right) / 2
            y = (top + bottom) / 2
            return x, y
        except Exception as e:
            self.logger.error("some exception with _get_center(): {}".format(e))
            return None

    def _get_coordinates(self) -> Union[Tuple[int, int, int, int], None]:
        """
        Parse the element's 'bounds' attribute ("[l,t][r,b]") into integers.

        Returns:
            tuple: (left, top, right, bottom), or None on WebDriver failure.
        """
        try:
            left, top, right, bottom = map(int, self.get_attribute('bounds').strip("[]").replace("][", ",").split(","))
            return left, top, right, bottom
        except WebDriverException as e:
            self.logger.error("Ошибка в методе _get_coordinates()")
            self.logger.exception(e)
            return None

    def _get_first_child_class(self) -> str:
        """
        Return the class of the first descendant whose class differs from the
        element's own class, or None when no such descendant exists.
        """
        parent_class = self.get_attribute('class')
        for child_element in self.find_elements("xpath", "//*[1]"):
            child_class = child_element.get_attribute('class')
            if parent_class != child_class:
                return str(child_class)
        return None

    def _get_top_child_from_parent(self,
                                   locator: Union[Tuple[str, str], WebElement, Dict[str, str]] = None) -> \
            Union[WebElement, None]:
        """
        Return the topmost (smallest y) child of the element matching ``locator``.

        When only a single child is found, the search descends into it.

        Args:
            locator: tuple / WebElement / dict locator of the children.

        Returns:
            The topmost matching child, or None.
        """
        if locator is None:
            locator = {'class': self._get_first_child_class()}
        children = self._get_elements(locator=locator)
        # Guard added: _get_elements may return None on failure; the original
        # crashed on len(None).
        if children is None:
            return None
        while len(children) <= 1:
            if len(children) == 0:
                return None
            # NOTE(review): descends with a global '//*' search, as in the
            # original — confirm this scope is intended.
            children = children[0].find_elements(by='xpath', value=f'//*')
        return sorted(children, key=lambda child: child.location['y'])[0]

    def _get_bottom_child_from_parent(self,
                                      locator: Union[Tuple[str, str], WebElement, Dict[str, str]] = None) -> \
            Union[WebElement, None]:
        """
        Return the bottommost (largest y + height) child matching ``locator``.

        When only a single child is found, the search descends into it.

        Args:
            locator: tuple / WebElement / dict locator of the children.

        Returns:
            The bottommost matching child, or None.
        """
        if locator is None:
            locator = {'class': self._get_first_child_class()}
        children = self._get_elements(locator=locator)
        # Guard added: _get_elements may return None on failure.
        if children is None:
            return None
        while len(children) <= 1:
            if len(children) == 0:
                return None
            # NOTE(review): descends with a global '//*' search, as in the
            # original — confirm this scope is intended.
            children = children[0].find_elements(by='xpath', value=f'//*')
        return sorted(children, key=lambda child: child.location['y'] + child.size['height'])[-1]
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_web_element/web_element_get.py
|
web_element_get.py
|
import logging
import time
from typing import Union, List, Dict
from appium.webdriver import WebElement
from selenium.common.exceptions import NoSuchElementException
from appium_extended_web_element.web_element_get import WebElementGet
class WebElementDOM(WebElementGet):
    """
    Searches for elements in the DOM structure relative to the current element.
    Inherits from WebElementGet.
    """
    def __init__(self, logger: logging.Logger, driver, element_id):
        super().__init__(logger=logger, driver=driver, element_id=element_id)
        # Attributes considered stable enough to identify an element between
        # page-source snapshots (used when walking up the ancestor chain).
        self.stable_attributes = ['bounds', 'enabled', 'displayed', 'focused', 'focusable', 'class', 'resource-id',
                                  'text']
    def _get_parent(self) -> WebElement:
        """
        Returns the parent element of the current element.
        """
        # XPath of this element plus "/.." selects its direct parent.
        xpath = self._get_xpath() + "/.."
        parent = self.driver.find_element(by='xpath', value=xpath)
        return parent
    def _get_parents(self) -> List[WebElement]:
        """
        Returns all ancestors of the element.

        Returns:
            List[WebElement]: all ancestor elements, from the closest up to the
            root element.
        """
        # XPath "ancestor::*" selects every ancestor of this element.
        xpath = self._get_xpath() + "/ancestor::*"
        parents = self.driver.find_elements(by='xpath', value=xpath)
        return parents
    def _get_sibling(self, attributes: Dict[str, str], contains: bool = True) -> Union[WebElement, None]:
        """
        Returns a sibling of the element matching the given attributes, i.e. a
        neighbouring element within the same direct parent.

        Args:
            attributes (dict): attribute -> value pairs the sibling must match.
            contains (bool): match attribute values by substring when True
                (default), exactly otherwise.

        Returns:
            WebElement or None: the matching sibling, or None when none exists.
        """
        xpath_attributes = ""
        # Build the attribute predicates depending on the matching mode.
        if contains:
            # Substring match on each attribute value.
            for attr, value in attributes.items():
                xpath_attributes += f"[contains(@{attr}, '{value}')]"
        else:
            # Exact match on each attribute value.
            for attr, value in attributes.items():
                xpath_attributes += f"[(@{attr}='{value}')]"
        try:
            # First look among the siblings preceding this element.
            xpath = self._get_xpath() + "/preceding-sibling::*" + xpath_attributes
            sibling_before = self.driver.find_element(by='xpath', value=xpath)
            return sibling_before
        except NoSuchElementException:
            try:
                # Then among the siblings following this element.
                xpath = self._get_xpath() + "/following-sibling::*" + xpath_attributes
                sibling_after = self.driver.find_element(by='xpath', value=xpath)
                return sibling_after
            except NoSuchElementException:
                return None
    def _get_siblings(self) -> Union[List[WebElement], List]:
        """
        Returns all siblings of the element, i.e. the neighbouring elements
        within the same direct parent.

        Returns:
            List[WebElement]: all siblings of the element (possibly empty).
        """
        try:
            # All siblings preceding this element.
            xpath = self._get_xpath() + "/preceding-sibling::*"
            siblings_before = self.driver.find_elements(by='xpath', value=xpath)
            # All siblings following this element.
            xpath = self._get_xpath() + "/following-sibling::*"
            siblings_after = self.driver.find_elements(by='xpath', value=xpath)
            # Combine both sides.
            siblings = siblings_before + siblings_after
            return siblings
        except NoSuchElementException as e:
            self.logger.error("Ошибка при _get_siblings: {}".format(e))
            return []
    def _get_cousin(self,
                    ancestor: WebElement,
                    cousin: Dict[str, str],
                    contains: bool = True) -> Union[WebElement, None]:
        """
        Finds a single "cousin" of the element: an element at the same depth
        relative to the given ancestor.

        Args:
            ancestor (WebElement): the ancestor element.
            cousin (Dict[str, str]): attributes the cousin must match.
            contains (bool): match attribute values by substring when True
                (default), exactly otherwise.

        Returns:
            WebElement: the cousin element, or None when none is found.
        """
        # Number of generations between the ancestor and this element.
        generation_len = self._generation_counter(ancestor=ancestor, descendant=self)
        # Default the cousin's class to this element's own class when missing.
        if 'class' not in cousin:
            cousin['class'] = self.get_attribute('class')
        # Root the XPath on the cousin's class.
        xpath = "//" + cousin['class']
        # Append one predicate per cousin attribute.
        if contains:
            # Substring match on each attribute value.
            for attr, value in cousin.items():
                xpath += f"[contains(@{attr}, '{value}')]"
        else:
            # Exact match on each attribute value.
            for attr, value in cousin.items():
                xpath += f"[@{attr}='{value}']"
        # Candidate cousins anywhere under the ancestor.
        possible_cousins = ancestor.find_elements('xpath', xpath)
        # Keep the first candidate at the same generation depth.
        for element in possible_cousins:
            if self._generation_counter(ancestor=ancestor, descendant=element) == generation_len:
                return element
        return None
    def _get_cousins(self, ancestor: WebElement, cousin: Dict[str, str], contains: bool = True) -> \
            Union[List[WebElement], List]:
        """
        Returns all "cousins" of the element: elements at the same depth
        relative to the given ancestor.

        Args:
            ancestor (WebElement): the ancestor element.
            cousin (dict): attributes the cousins must match.
            contains (bool): match attribute values by substring when True
                (default), exactly otherwise.

        Returns:
            list: the matching cousin elements (possibly empty).
        """
        # Number of generations between the ancestor and this element.
        generation_len = self._generation_counter(ancestor=ancestor, descendant=self, )
        # Default the cousin's class to this element's own class when missing.
        if 'class' not in cousin:
            cousin['class'] = self.get_attribute('class')
        # Root the XPath on the cousin's class.
        xpath = "//" + cousin['class']
        # Append one predicate per cousin attribute.
        if contains:
            # Substring match on each attribute value.
            for attr, value in cousin.items():
                xpath += f"[contains(@{attr}, '{value}')]"
        else:
            # Exact match on each attribute value.
            for attr, value in cousin.items():
                xpath += f"[@{attr}='{value}']"
        # Candidate cousins anywhere under the ancestor.
        possible_cousins = ancestor.find_elements('xpath', xpath)
        result = []
        # Keep every candidate at the same generation depth.
        for element in possible_cousins:
            if self._generation_counter(ancestor=ancestor, descendant=element) == generation_len:
                result.append(element)
        return result
    def _generation_counter(self,
                            ancestor: WebElement,
                            descendant: WebElement,
                            timeout: int = 90) -> int:
        """
        Counts the number of generations between an ancestor and a descendant.

        Args:
            ancestor (WebElement): the ancestor element.
            descendant (WebElement): the descendant element.
            timeout (int): time budget in seconds (default: 90).

        Returns:
            int: the number of generations between the elements, or 0 when
            they are not in an ancestor-descendant relationship.
        """
        start_time = time.time()
        generation_count = 0
        current_element = descendant
        # Walk up one parent at a time until we reach the ancestor, run out of
        # elements, or exceed the time budget.
        while current_element is not None and current_element != ancestor and time.time() - start_time < timeout:
            attributes = {}
            # Snapshot the stable attributes of the current element.
            for attribute in self.stable_attributes:
                attributes[attribute] = current_element.get_attribute(attribute)
            # Drop attributes with no value.
            attributes = {k: v for k, v in attributes.items() if v is not None}
            # Root the XPath on the element class.
            # NOTE(review): assumes 'class' is always among the non-None
            # attributes; a class-less element would raise KeyError — confirm.
            xpath = "//" + attributes['class']
            # Append one predicate per remaining attribute.
            for attr, value in attributes.items():
                xpath += f"[@{attr}='{value}']"
            # "/.." steps up to the parent element.
            xpath += "/.."
            try:
                # Resolve the parent by XPath.
                current_element = self.driver.find_element(by='xpath', value=xpath)
                generation_count += 1
            except NoSuchElementException:
                # Parent not found: the two elements are not related.
                self.logger.error("Элементы не связаны связью предок-потомок")
                return 0
        return generation_count
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_web_element/web_element_dom.py
|
web_element_dom.py
|
import re
import os
import logging
import serial.tools.list_ports
import shutil
import json
from typing import Optional
START_DIR = os.getcwd()
logger = logging.getLogger(__name__)
def extract_numeric(variable: str) -> Optional[float]:
    """
    Extract a numeric value from a string.

    Handles an optional leading minus sign and a decimal comma,
    e.g. "-12,5" -> -12.5.

    Args:
        variable (str): the string to extract the number from.

    Returns:
        Optional[float]: the number found in the string, or None when the
        string contains no number.
    """
    match = re.search(r'-?\d+(?:,\d+)?', variable)
    if match is None:
        return None
    # Normalise the decimal comma before converting to float.
    return float(match.group().replace(',', '.'))
def find_latest_folder(path: str) -> Optional[str]:
    """
    Find the most recent launch folder under ``path``.

    Only folders named ``launch_YYYY-MM-DD_HH-MM-SS`` are considered; for this
    name format lexicographic order equals chronological order.

    Args:
        path (str): directory to scan.

    Returns:
        Optional[str]: the name of the newest matching folder, or None when
        there is none.
    """
    pattern = re.compile(r"launch_\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
    candidates = [
        entry for entry in os.listdir(path)
        if os.path.isdir(os.path.join(path, entry)) and pattern.match(entry)
    ]
    if not candidates:
        return None
    # max() on the names picks the latest timestamp for this naming scheme.
    return str(max(candidates))
def get_com() -> Optional[str]:
    """
    Return the COM-port number of a connected device.

    Returns:
        Optional[str]: the port number without the "COM" prefix, or None when
        no suitable port is found.
    """
    ports = serial.tools.list_ports.comports()  # Enumerate available serial ports
    for port in ports:
        # NOTE(review): assumes Windows-style device names ("COM11", ...) —
        # port.device[3:] strips the first three characters; on other OSes
        # (e.g. "/dev/ttyUSB0") int() would raise. Confirm target platform.
        if int(port.device[3:]) > 10:  # Only consider port numbers above 10
            try:
                ser = serial.Serial(port.device)  # Probe the port by opening it
                ser.close()  # Release it immediately
                return port.device[3:]  # Return the number without "COM"
            except serial.SerialException:
                # Port busy or unavailable: keep looking.
                pass
    return None  # No usable port found
def copy_file(source: str, destination: str) -> None:
    """
    Copy a file from ``source`` to ``destination``.

    Failures are logged rather than raised.

    Args:
        source (str): source file path.
        destination (str): destination path.

    Returns:
        None
    """
    # Debug trace of both paths before copying.
    logging.debug("copy_file() source %s, destination %s", source, destination)
    try:
        # Perform the actual copy.
        shutil.copy(source, destination)
        logging.debug("File copied successfully!")
    except IOError as e:
        # Copy failed: log and swallow (best-effort semantics).
        logging.error("Unable to copy file: %s" % e)
def count_currency_numbers(number: int) -> tuple:
    """
    Break an amount into banknote counts of denominations 5000/1000/500/100.

    Amounts below 100 are treated as 100 (matters for change calculation).

    Args:
        number (int): the amount to decompose.

    Returns:
        tuple: (count_5000, count_1000, count_500, count_100), in descending
        denomination order.
    """
    amount = 100 if number < 100 else number
    # Peel off each denomination with divmod, largest first.
    count_5000, remainder = divmod(amount, 5000)
    count_1000, remainder = divmod(remainder, 1000)
    count_500, remainder = divmod(remainder, 500)
    count_100 = remainder // 100
    return count_5000, count_1000, count_500, count_100
def read_json(path: str, filename: str):
    """
    Read a JSON file located at ``START_DIR/path/filename``.

    Args:
        path (str): directory of the JSON file, relative to START_DIR.
        filename (str): name of the JSON file.

    Returns:
        dict: the parsed JSON data, or None when the file does not exist.
    """
    filepath = os.path.join(START_DIR, path, filename)
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            return json.load(f)
    except FileNotFoundError:
        logging.error("Файл не найден")
        return None
def str_to_float(number: str) -> float:
    """Convert a string money representation into a float.

    Args:
        number (str): String representation of the amount,
            e.g. "1 234,56₽".

    Returns:
        float: The amount as a float.
    """
    # Coerce to str first in case a numeric value was passed in.
    cleaned = str(number)
    # Normalize the decimal comma and strip the currency sign and spaces.
    for old, new in ((',', '.'), ('₽', ''), (' ', '')):
        cleaned = cleaned.replace(old, new)
    return float(cleaned)
def grep_pattern(input_string, pattern):
    """Return the lines of *input_string* matching *pattern* (grep-like).

    Args:
        input_string: Text to scan, split on newlines.
        pattern: Regular expression searched within each line.

    Returns:
        list: Lines for which the pattern matched somewhere in the line.
    """
    compiled = re.compile(pattern)
    matched = []
    for line in input_string.split('\n'):
        if compiled.search(line):
            matched.append(line)
    return matched
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_utils/operations.py
|
operations.py
|
import json
import logging
import math
import os
from typing import Tuple
START_DIR = os.getcwd()
PROJECT_ROOT_DIR = os.path.dirname(__file__)
logger = logging.getLogger(__name__)
def write_to_json(path, filename, data):
    """Serialize *data* to a NEW JSON file at START_DIR/path/filename.

    The file is opened in exclusive-creation mode ('x'), so the call fails
    (returns False) when the file already exists.

    Args:
        path: Relative directory path below START_DIR.
        filename: Target file name.
        data: JSON-serializable object to dump.

    Returns:
        bool: True on success, False on any failure (existing file,
        missing directory, non-serializable data, ...).
    """
    try:
        filepath = os.path.join(START_DIR, path, filename)
        with open(filepath, 'x', encoding='utf-8') as f:
            json.dump(data, f)
        return True
    # BUGFIX: narrowed from a bare `except:` which also swallowed
    # SystemExit/KeyboardInterrupt; the failure is now logged instead of
    # being silently discarded.
    except Exception:
        logger.exception("write_to_json() failed")
        return False
def remove_keys_from_json_files_recursively(keys: list, path: str):
    """Recursively walk all sub-folders looking for .json files and delete
    the given keys (with their values) from each file.

    Example:
        keys_to_remove = ["1038", "1040", "1042", "qr", "1021", "1012", "1077"]
        path = os.path.join('test_data', 'FFD_1_05', 'cash')
        operations.remove_keys_from_json_files_recursively(keys=keys_to_remove, path=path)
    """
    root_dir = os.path.join(START_DIR, path)
    # Walk the whole tree below root_dir, rewriting every JSON file found.
    for current_dir, _dirs, filenames in os.walk(root_dir):
        for name in filenames:
            if not name.endswith('.json'):
                continue
            file_path = os.path.join(current_dir, name)
            logger.debug(f"file_path: {file_path}")
            with open(file_path, 'r', encoding='utf-8') as fp:
                content = json.load(fp)
            # pop(..., None) removes the key when present, no-op otherwise.
            for key in keys:
                content.pop(key, None)
            with open(file_path, 'w', encoding='utf-8') as fp:
                json.dump(content, fp)
def change_values_in_json_files_recursively(keys: dict, path: str):
    """Recursively walk all sub-folders looking for .json files and replace
    the values of the given keys in each file.

    Example:
        keys = {
            "1031": 0,
            "1081": 1,
        }
        path = os.path.join('test_data', 'FFD_1_05', 'card')
        operations.change_values_in_json_files_recursively(keys=keys, path=path)
    """
    root_dir = os.path.join(START_DIR, path)
    # Walk the whole tree below root_dir, rewriting every JSON file found.
    for current_dir, _dirs, filenames in os.walk(root_dir):
        for name in filenames:
            if not name.endswith('.json'):
                continue
            file_path = os.path.join(current_dir, name)
            logger.debug(f"file_path: {file_path}")
            with open(file_path, 'r', encoding='utf-8') as fp:
                content = json.load(fp)
            # Replace values only for keys that exist in this file.
            for key, new_value in keys.items():
                if key in content:
                    logger.debug(f"data[text]: {content[key]}")
                    logger.debug(f"keys[text]: {new_value}")
                    content[key] = new_value
            with open(file_path, 'w', encoding='utf-8') as fp:
                json.dump(content, fp)
def change_values_in_dict(dict_needs_to_change: dict, changes: dict) -> dict:
    """Apply search-and-replace changes to the given dict, in place.

    For every key of *changes* present in *dict_needs_to_change*:
    a value of None deletes the key, any other value replaces it.

    Args:
        dict_needs_to_change (dict): Dict modified in place.
        changes (dict): Mapping of keys to new values (None -> delete key).

    Returns:
        dict: The (possibly modified) input dict.

    Note:
        BUGFIX: the original returned None when nothing was replaced
        (including when only deletions happened, since the counter was not
        incremented on the delete branch), contradicting its documented
        contract of returning the dict.  The dict is now always returned.
    """
    logger.debug("change_values_in_dict()")
    count = 0
    for key, new_value in changes.items():
        if key in dict_needs_to_change:
            if new_value is None:
                dict_needs_to_change.pop(key)
            else:
                dict_needs_to_change[key] = new_value
            # Count both deletions and replacements as changes.
            count += 1
    if count > 0:
        logger.debug("change_values_in_dict(): Словарь подготовлен")
    else:
        logger.debug("change_values_in_dict(): В словаре нечего менять")
    return dict_needs_to_change
def find_coordinates_by_vector(width, height, direction: int, distance: int, start_x: int, start_y: int) -> Tuple[int, int]:
    """Compute the end point of a movement vector on the screen.

    The direction is a compass-style angle in degrees: 0 points up,
    90 right, 180 down, 270 left.  The end point is clipped to the
    screen bounds [0, width] x [0, height].

    (Fixes the placeholder "fill me" docstring; the computation itself
    is unchanged.)

    Args:
        width: Screen width in pixels.
        height: Screen height in pixels.
        direction (int): Direction of movement in degrees, expected in [0, 360].
        distance (int): Distance to travel along the direction.
        start_x (int): X coordinate of the start point.
        start_y (int): Y coordinate of the start point.

    Returns:
        Tuple[int, int]: The clipped end point (x, y).  Note: the values are
        floats produced by the trigonometry; round them if ints are needed.
    """
    # Convert the compass direction to radians.
    angle_radians = direction * (math.pi / 180)
    # Absolute displacement along each axis.
    dy = abs(distance * math.cos(angle_radians))
    dx = abs(distance * math.sin(angle_radians))
    # Directions 0..180 move right, the rest move left.
    if 0 <= direction <= 180:
        x = start_x + dx
    else:
        x = start_x - dx
    # Directions in the top half (0..90 and 270..360) move up (y decreases).
    if 0 <= direction <= 90 or 270 <= direction <= 360:
        y = start_y - dy
    else:
        y = start_y + dy
    # Clip the end point to the screen bounds.
    x2 = (max(0, min(x, width)))
    y2 = (max(0, min(y, height)))
    return x2, y2
def calculate_center_of_coordinates(coordinates: Tuple[int, int, int, int]) -> Tuple[int, int]:
    """Return the center point of a bounding box.

    Args:
        coordinates (Tuple[int, int, int, int]): Bounding box as (x1, y1, x2, y2).

    Returns:
        Tuple[int, int]: Integer (center_x, center_y) of the box center.
    """
    left, top, right, bottom = coordinates
    # Integer midpoint of each axis (floor division).
    return (left + right) // 2, (top + bottom) // 2
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended_utils/utils.py
|
utils.py
|
import logging
import time
from typing import Union, Dict, List, Tuple, Optional, Any
import numpy as np
from PIL import Image
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import WebDriverException, TimeoutException, NoSuchElementException
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.by import By
from appium.webdriver import WebElement
from appium.webdriver.common.mobileby import MobileBy
from appium.webdriver.common.appiumby import AppiumBy
from appium_extended.appium_base import AppiumBase
class AppiumGet(AppiumBase):
    """
    Appium extension class.
    Provides retrieval of elements, coordinates and screenshots from the
    current page.
    """
    def __init__(self, logger: logging.Logger):
        super().__init__(logger=logger)

    def _get_element(self,
                     locator: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str] = None,
                     by: Union[MobileBy, AppiumBy, By, str] = None,
                     value: Union[str, Dict, None] = None,
                     timeout_elem: int = 10,
                     timeout_method: int = 600,
                     elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
                     contains: bool = True
                     ) -> \
            Union[WebElement, None]:
        """
        Search for a single element in the current DOM structure.

        Either ``locator`` or the ``by``/``value`` pair must be supplied.

        Args:
            locator: One of:
                - Tuple ``(strategy, selector)``, e.g. ``("id", "android.widget.ProgressBar")``;
                - WebElement / WebElementExtended (returned as-is);
                - Dict of element attributes, e.g. ``{'text': 'foo', 'enabled': 'true'}``;
                - str path to an image fragment searched on screen.
            by: Locator strategy (always together with ``value``),
                as in the standard ``driver.find_element``.
            value: Locator value, or a dict of arguments for AppiumBy.XPATH.
            timeout_elem: Seconds to wait for the element (default 10).
            timeout_method: Seconds allotted to the whole search loop (default 600).
            elements_range: Restricts image-based search to the given range.
            contains: For dict locators with a 'text' attribute —
                substring match when True, exact match when False.

        Usages:
            element = app._get_element(locator=("id", "foo"))
            element = app._get_element(element)
            element = app._get_element(locator={'text': 'foo'}, contains=True)
            element = app._get_element(locator='/path/to/file/image.png')
            element = app._get_element(by="id", value="backButton")

        Returns:
            Union[WebElement, None]: The element if found, otherwise None.
        """
        # Validate arguments: need either a locator or both by and value.
        if (not locator) and (not by or not value):
            self.logger.error(f"Некорректные аргументы!\n"
                              f"{locator=}\n"
                              f"{by=}\n"
                              f"{value=}\n"
                              f"{timeout_elem=}\n")
            return None
        if not locator and (by and value):
            locator = (by, value)
        if locator is None:
            return None
        # Locator-preparation strategies, keyed by the locator's type.
        locator_handler = {
            # returns the element itself
            WebElement: self.helper.handle_webelement_locator,
            # returns the element itself (matched by class NAME, see the
            # fallback lookup below — type(locator) never equals a string)
            'WebElementExtended': self.helper.handle_webelement_locator,
            # builds a tuple locator from a dict of element attributes
            dict: self.helper.handle_dict_locator,
            # image-fragment search
            str: self.helper.handle_string_locator,
        }
        # Keep preparing the locator until it resolves to a WebElement or
        # the overall method timeout expires.
        start_time = time.time()
        while not isinstance(locator, WebElement) and time.time() - start_time < timeout_method:
            locator_type = type(locator)
            # A tuple locator can be fed to WebDriverWait directly.
            if isinstance(locator, tuple):
                wait = WebDriverWait(driver=self.driver, timeout=timeout_elem)
                try:
                    return wait.until(EC.presence_of_element_located(locator))
                except NoSuchElementException:
                    return None
                except TimeoutException as error:
                    self.logger.debug(f"Элемент не обнаружен!\n"
                                      f"{locator=}\n"
                                      f"{timeout_elem=}\n\n" +
                                      "{}\n".format(error))
                    # BUGFIX: page source must be passed as a lazy %-style
                    # argument; the original call had no placeholder, so the
                    # argument was never rendered.
                    self.logger.debug("page source %s", self.driver.page_source)
                    return None
                except WebDriverException as error:
                    self.logger.debug(f"Элемент не обнаружен!\n"
                                      f"{locator=}\n"
                                      f"{timeout_elem=}\n\n" +
                                      "{}\n".format(error))
                    self.logger.debug("page source %s", self.driver.page_source)
                    return None
            # Pick the preparation handler; fall back to a lookup by class
            # name so the 'WebElementExtended' entry is reachable.
            handler = locator_handler.get(locator_type) or locator_handler.get(locator_type.__name__)
            # BUGFIX: the original tested `locator is None` here, so an
            # unsupported locator type crashed with "'NoneType' is not callable".
            if handler is None:
                return None
            locator = handler(locator=locator, timeout=timeout_elem, elements_range=elements_range, contains=contains)
        # Image-based search may already have produced a WebElement.
        if isinstance(locator, WebElement):
            return locator
        self.logger.error(f"Что-то пошло не так\n"
                          f"{locator=}\n"
                          f"{by=}\n"
                          f"{value=}\n"
                          f"{timeout_elem=}\n"
                          f"{timeout_method=}\n")
        return None

    def _get_elements(self,
                      locator: Union[Tuple, List[WebElement], Dict[str, str], str] = None,
                      by: Union[MobileBy, AppiumBy, By, str] = None,
                      value: Union[str, Dict, None] = None,
                      timeout_elements: int = 10,
                      timeout_method: int = 600,
                      elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
                      contains: bool = True) -> \
            Union[List[WebElement], None]:
        """
        Search for multiple elements in the current DOM structure.

        Either ``locator`` or the ``by``/``value`` pair must be supplied.

        Args:
            locator: Tuple ``(strategy, selector)``, a list of WebElements
                (returned as-is), a dict of element attributes, or a str
                path to an image fragment.
            by: Locator strategy (always together with ``value``).
            value: Locator value, or a dict of arguments for XPATH.
            timeout_elements: Seconds to wait for the elements (default 10).
            timeout_method: Seconds allotted to the whole search loop (default 600).
            elements_range: Restricts the search to the given range.
            contains: For dict locators with 'text' — substring match when
                True, exact match when False.

        Usages:
            elements = app._get_elements(locator=("id", "foo"))
            elements = app._get_elements(locator={'text': 'foo'})
            elements = app._get_elements(locator='/path/to/file/pay_agent.png')
            elements = app._get_elements(by="id", value="ru.sigma.app.debug:id/backButton")

        Returns:
            Union[List[WebElement], None]: The elements if found, otherwise None.
        """
        # Validate arguments.
        if not locator and (not by or not value):
            self.logger.error(f"Некорректные аргументы!\n"
                              f"{locator=}\n"
                              f"{by=}\n"
                              f"{value=}\n"
                              f"{timeout_elements=}\n"
                              f"{timeout_method=}\n")
            return None
        if not locator and (by and value):
            locator = (by, value)
        if locator is None:
            return None
        # Locator-preparation strategies, keyed by the locator's type.
        locator_handler = {
            # a list of elements is returned as-is
            list: self.helper.handle_webelement_locator_elements,
            # builds a tuple locator from a dict of element attributes
            dict: self.helper.handle_dict_locator_elements,
            # image-fragment search; returns matching elements
            str: self.helper.handle_string_locator_elements,
        }
        # Keep preparing the locator until it resolves to a list of
        # elements or the overall method timeout expires.
        start_time = time.time()
        while not isinstance(locator, list) and time.time() - start_time < timeout_method:
            locator_type = type(locator)
            if isinstance(locator, tuple):
                wait = WebDriverWait(driver=self.driver, timeout=timeout_elements)
                try:
                    return wait.until(EC.presence_of_all_elements_located(locator))
                except WebDriverException as error:
                    self.logger.debug(f"Элемент не обнаружен!\n"
                                      f"{locator=}\n"
                                      f"{by=}\n"
                                      f"{value=}\n"
                                      f"{timeout_elements=}\n"
                                      f"{timeout_method=}\n\n" +
                                      "{}\n".format(error))
                    return None
            handler = locator_handler.get(locator_type)
            # BUGFIX: guard against unsupported locator types instead of
            # crashing with "'NoneType' is not callable".
            if handler is None:
                return None
            locator = handler(locator=locator,
                              timeout=timeout_elements,
                              elements_range=elements_range,
                              contains=contains)
        if isinstance(locator, list):
            return locator
        self.logger.debug(f"\nЧто-то пошло не так\n"
                          f"{locator=}\n"
                          f"{by=}\n"
                          f"{value=}\n"
                          f"{timeout_elements=}\n"
                          f"{timeout_method=}\n")
        return None

    def _get_image_coordinates(self,
                               image: Union[bytes, np.ndarray, Image.Image, str],
                               full_image: Union[bytes, np.ndarray, Image.Image, str] = None,
                               threshold: float = 0.7,
                               ) -> Union[Tuple[int, int, int, int], None]:
        """Delegate: find *image* inside *full_image* (or the screen)."""
        return self.helper.get_image_coordinates(image=image, full_image=full_image, threshold=threshold)

    def _get_inner_image_coordinates(self,
                                     outer_image_path: Union[bytes, np.ndarray, Image.Image, str],
                                     inner_image_path: Union[bytes, np.ndarray, Image.Image, str],
                                     threshold: float = 0.9) -> \
            Union[Tuple[int, int, int, int], None]:
        """Delegate: find the inner image inside the outer image on screen."""
        return self.helper.get_inner_image_coordinates(outer_image_path=outer_image_path,
                                                       inner_image_path=inner_image_path,
                                                       threshold=threshold)

    def _get_text_coordinates(self,
                              text: str,
                              language: str = 'rus',
                              image: Union[bytes, str, Image.Image, np.ndarray] = None, ) -> Optional[tuple[int, ...]]:
        """Delegate: locate *text* on the screen (or the given image) via the helper."""
        return self.helper.get_text_coordinates(text=text, language=language, image=image)

    def _get_screenshot_as_base64_decoded(self):
        """Delegate: return the current screenshot, base64-decoded."""
        return self.helper._get_screenshot_as_base64_decoded()
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended/appium_get.py
|
appium_get.py
|
import logging
import time
from typing import Union, Dict, List, Tuple
import numpy as np
from PIL import Image
from appium.webdriver import WebElement
from appium_extended.appium_get import AppiumGet
class AppiumWait(AppiumGet):
    """
    Appium extension class.
    Provides waiting for locators/images to appear on, or disappear from,
    the screen.
    """
    def __init__(self, logger: logging.Logger):
        super().__init__(logger=logger)
    def _wait_for(self,
                  locator: Union[Tuple[str, str], WebElement, 'WebElementExtended', Dict[str, str], str,
                  List[Tuple[str, str]], List[WebElement], List['WebElementExtended'], List[Dict[str, str]], List[
                      str]] = None,
                  image: Union[bytes, np.ndarray, Image.Image, str,
                  List[bytes], List[np.ndarray], List[Image.Image], List[str]] = None,
                  timeout: int = 10,
                  contains: bool = True,
                  ):
        """
        Wait until the given locator(s) and/or image(s) appear on the screen.

        Args:
            locator: A single locator or a list of locators; each may be a
                tuple ``(strategy, selector)``, an element, a dict of element
                attributes, or a str image path.  Every listed locator must
                be found for the wait to succeed.
            image: A single image or a list of images (bytes / ndarray /
                PIL image / file path).  Every listed image must appear.
            timeout (int): Maximum wait in seconds (default 10).  For images
                the clock is shared across the whole list, not per item.
            contains (bool): For dict locators — substring match on 'text'
                when True, exact match when False.  Default True.

        Usages:
            - _wait_for(locator=("id", "android.widget.ProgressBar"), timeout=5)
            - _wait_for(image="path/to/image.png", timeout=10)
            - _wait_for(locator=[("id", "element1"), ("name", "element2")], timeout=5)

        Returns:
            bool: True when every requested locator/image was found within
            the timeout, False otherwise.
        """
        if locator is not None:
            # Normalize a single locator into a one-element list.
            if not isinstance(locator, List):
                locator = [locator]
            # Each locator must resolve within its own timeout window.
            for i in locator:
                # Check if the element is present
                if self._get_element(locator=i, timeout_elem=timeout, contains=contains) is None:
                    return False
        if image is not None:
            # One shared clock for the whole image list.
            start_time = time.time()
            if not isinstance(image, List):
                image = [image]
            # Poll once a second for each image until found or timed out.
            for i in image:
                # Check if the image is on the screen within the timeout period
                while not self.helper.is_image_on_the_screen(image=i) and time.time() - start_time < timeout:
                    time.sleep(1)
                if not self.helper.is_image_on_the_screen(image=i):
                    return False
        # Return True if all conditions are met
        return True
    def _wait_for_not(self,
                      locator: Union[Tuple[str, str], WebElement, 'WebElementExtended', Dict[str, str], str,
                      List[Tuple[str, str]], List[WebElement], List['WebElementExtended'], List[Dict[str, str]], List[
                          str]] = None,
                      image: Union[bytes, np.ndarray, Image.Image, str,
                      List[bytes], List[np.ndarray], List[Image.Image], List[str]] = None,
                      timeout: int = 10,
                      contains: bool = True,
                      ):
        """
        Wait until the given locator(s) / image(s) disappear from the
        screen / DOM.

        Args:
            locator: A single locator or a list of locators to wait out.
            image: A single image or a list of images to wait out.
            timeout (int): Maximum wait in seconds (default 10).
            contains (bool): For dict locators — substring match when True,
                exact match when False.  Default True.

        Returns:
            bool: True once none of the given locators (respectively
            images) is present; False when neither ``locator`` nor
            ``image`` was supplied.

        Raises:
            TimeoutError: When the items are still present after *timeout*
                seconds.  NOTE(review): despite the bool return on the
                success path, a timeout raises rather than returning False —
                callers must handle the exception.
        """
        if locator is not None:
            if not isinstance(locator, List):
                locator = [locator]
            # Poll once a second until no locator from the list is found.
            start_time = time.time()
            while time.time() - start_time < timeout:
                locators_present = False
                for i in locator:
                    # A short per-check timeout keeps the poll loop responsive.
                    if not self._get_element(locator=i, timeout_elem=1, contains=contains) is None:
                        locators_present = True
                if not locators_present:
                    return True
                time.sleep(1)
            raise TimeoutError
        if image is not None:
            if not isinstance(image, List):
                image = [image]
            # Poll once a second until no image from the list is on screen.
            start_time = time.time()
            while time.time() - start_time < timeout:
                images_present = False
                for i in image:
                    # Check if the image is on the screen within the timeout period
                    if self.helper.is_image_on_the_screen(image=i):
                        images_present = True
                if not images_present:
                    return True
                time.sleep(1)
            raise TimeoutError
        return False
    @staticmethod
    def _wait_return_true(method, timeout: int = 10):
        """
        Poll *method* once a second until it returns a truthy value.

        Args:
            method: Zero-argument callable to poll.
            timeout (int): Maximum wait in seconds (default 10).

        Raises:
            TimeoutError: When *method* never returned a truthy value
                within *timeout* seconds.
        """
        start_time = time.time()
        while time.time() - start_time < timeout:
            if method():
                return
            time.sleep(1)
        raise TimeoutError
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended/appium_wait.py
|
appium_wait.py
|
import logging
import os
import time
from typing import Union, Tuple, Dict, List, Optional, cast, Any
import numpy as np
from PIL import Image
from appium.webdriver.common.appiumby import AppiumBy
from appium.webdriver.common.mobileby import MobileBy
from appium.webdriver import WebElement
from selenium.webdriver.common.by import By
from appium_extended.appium_swipe import AppiumSwipe
from appium_extended.appium_wait import AppiumWait
from appium_extended.appium_tap import AppiumTap
from appium_extended.appium_is import AppiumIs
from appium_extended_exceptions.appium_extended_exceptions import TapError, GetElementError, GetElementsError, \
GetImageCoordinatesError, GetInnerImageCoordinatesError, GetManyCoordinatesOfImageError, GetTextCoordinatesError, \
FindAndGetElementError, IsElementWithinScreenError, IsTextOnScreenError, IsImageOnScreenError, SaveSourceError, \
GetScreenshotError, ExtractPointCoordinatesError, ExtractPointCoordinatesByTypingError, SaveScreenshotError, \
DrawByCoordinatesError, WaitReturnTrueError, WaitForNotError, WaitForError, SwipeError
from appium_extended_web_element.web_element_extended import WebElementExtended
from appium_extended_utils import utils
class AppiumExtended(AppiumIs, AppiumTap, AppiumSwipe, AppiumWait):
"""
Класс работы с Appium.
Обеспечивает работу с устройством
"""
def __init__(self, logger: logging.Logger = None, log_level: int = logging.INFO, log_path: str = ''):
if logger is None:
logger = logging.getLogger(__name__)
logger.setLevel(log_level)
if bool(log_path):
if not log_path.endswith('.log'):
log_path = log_path + '.log'
file_handler = logging.FileHandler(log_path)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
super().__init__(logger=logger)
def get_element(self,
locator: Union[Tuple, WebElementExtended, Dict[str, str], str] = None,
by: Union[MobileBy, AppiumBy, By, str] = None,
value: Union[str, Dict, None] = None,
timeout_elem: int = 10,
timeout_method: int = 600,
elements_range: Union[Tuple, List[WebElementExtended], Dict[str, str], None] = None,
contains: bool = True,
) -> Union[WebElementExtended, None]:
"""
Метод обеспечивает поиск элемента в текущей DOM структуре.
Должен принимать либо локатор, либо значения by и value.
Args:
locator (Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str], optional):
Определяет локатор элемента.
Tuple - локатор в виде кортежа из двух строковых элементов,
где первый это стратегия поиска, а второй это селектор, например ("id", "android.widget.ProgressBar").
Dict - локатор в виде словаря атрибутов и их значений искомого элемента,
например {'text': 'foo', 'displayed' : 'true', 'enabled': 'true'}.
str - путь до изображения. Будет искать изображение, вычислять координаты и
искать в DOM ближайший к этим координатам элемент
by (Union[MobileBy, AppiumBy, By, str], optional):
Тип локатора для поиска элемента (всегда в связке с value).
Как в стандартном методе driver.find_element.
value (Union[str, Dict, None], optional):
Значение локатора или словарь аргументов, если используется XPATH.
timeout_elem (int, optional):
Время ожидания элемента. По умолчанию 10 секунд.
timeout_method (int, optional):
Время ожидания метода поиска элемента. По умолчанию 600 секунд.
elements_range (Union[Tuple, List[WebElement], Dict[str, str], None], optional):
Ограничивает поиск элемента в указанном диапазоне (для поиска по изображению).
contains (bool, optional):
Для поиска по dict и атрибуту 'text',
True - ищет элемент содержащий фрагмент значения
False - по строгому соответствию.
По умолчанию True.
Usages:
element = app._get_element(locator=("id", "foo"))
element = app._get_element(element)
element = app._get_element(locator={'text': 'foo'}, contains=True)
element = app._get_element(locator='/path/to/file/image.png')
element = app._get_element(by="id", value="backButton")
element = app._get_element(by=MobileBy.ID, value="backButton")
Returns:
Union[WebElementExtended, None]: Возвращает WebElementExtended, если элемент найден, иначе None.
"""
try:
element = self._get_element(locator=locator,
by=by,
value=value,
timeout_elem=timeout_elem,
timeout_method=timeout_method,
elements_range=elements_range,
contains=contains)
except Exception as error:
raise GetElementError(message=f"Ошибка при попытке извлечь элемент {error}",
locator=locator,
by=by,
value=value,
timeout_elem=timeout_elem,
timeout_method=timeout_method,
elements_range=elements_range,
contains=contains,
original_exception=error
) from error
if element is None:
raise GetElementError(message="Элемент не найден",
locator=locator,
by=by,
value=value,
timeout_elem=timeout_elem,
timeout_method=timeout_method,
elements_range=elements_range,
contains=contains
)
return WebElementExtended(driver=element.parent, element_id=element.id, logger=self.logger)
def get_elements(self,
locator: Union[Tuple, List[WebElement], Dict[str, str], str] = None,
by: Union[MobileBy, AppiumBy, By, str] = None,
value: Union[str, Dict, None] = None,
timeout_elements: int = 10,
timeout_method: int = 600,
elements_range: Union[Tuple, List[WebElement], Dict[str, str], None] = None,
contains: bool = True,
) -> Union[List[WebElementExtended], List]:
"""
Метод обеспечивает поиск элементов в текущей DOM структуре.
Должен принять либо локатор, либо by и value.
Args:
locator (Union[Tuple, List[WebElement], Dict[str, str], str], optional):
Определяет локатор элементов.
Tuple - локатор в виде кортежа из двух строковых элементов,
где первый это стратегия поиска, а второй это селектор,
например ("id", "android.widget.ProgressBar").
Dict - локатор в виде словаря атрибутов и их значений искомого элемента,
например {'text': 'foo', 'displayed' : 'true', 'enabled': 'true'}.
by (Union[MobileBy, AppiumBy, By, str], optional):
Тип локатора для поиска элементов (всегда в связке с value).
Как в стандартном методе driver.find_element.
value (Union[str, Dict, None], optional):
Значение локатора или словарь аргументов, если используется XPATH.
timeout_elements (int, optional):
Время ожидания элементов. По умолчанию 10 секунд.
timeout_method (int, optional):
Время ожидания метода поиска элементов. По умолчанию 600 секунд.
elements_range (Union[Tuple, List[WebElement], Dict[str, str], None], optional):
Ограничивает поиск элементов в указанном диапазоне.
contains (bool, optional):
Для поиска по dict и атрибуту 'text',
True - ищет элемент содержащий фрагмент значения,
False - по строгому соответствию.
По умолчанию True.
Usages:
elements = app._get_elements(locator=("id", "foo"))
elements = app._get_elements(locator={'text': 'foo'})
elements = app._get_elements(locator='/path/to/file/pay_agent.png')
elements = app._get_elements(by="id", value="ru.sigma.app.debug:id/backButton")
elements = app._get_elements(by=MobileBy.ID, value="ru.sigma.app.debug:id/backButton")
Returns:
Union[List[WebElementExtended], List]: Возвращает список объектов WebElementExtended,
если элементы найдены, иначе пустой список.
"""
try:
elements = super()._get_elements(locator=locator,
by=by,
value=value,
timeout_elements=timeout_elements,
timeout_method=timeout_method,
elements_range=elements_range,
contains=contains)
except Exception as error:
raise GetElementsError(message=f"Ошибка при попытке извлечь элементы: {error}",
by=by,
value=value,
timeout_elements=timeout_elements,
timeout_method=timeout_method,
elements_range=elements_range,
contains=contains,
original_exception=error,
) from error
if elements is None:
raise GetElementsError(message="Элементы не найдены",
by=by,
value=value,
timeout_elements=timeout_elements,
timeout_method=timeout_method,
elements_range=elements_range,
contains=contains
)
elements_ext = []
for element in elements:
elements_ext.append(
WebElementExtended(driver=element.parent, element_id=element.id, logger=self.logger))
return elements_ext
def get_image_coordinates(self,
image: Union[bytes, np.ndarray, Image.Image, str],
full_image: Union[bytes, np.ndarray, Image.Image, str] = None,
threshold: float = 0.7,
) -> Union[Tuple, None]:
"""
Находит координаты наиболее вероятного совпадения частичного изображения в полном изображении.
Args:
image (Union[bytes, np.ndarray, Image.Image, str]):
Частичное изображение или путь к файлу, которое нужно найти внутри полного изображения.
full_image (Union[bytes, np.ndarray, Image.Image, str], optional):
Полное изображение или путь к файлу. По умолчанию None, в этом случае используется скриншот экрана.
threshold (float, optional):
Минимальный порог совпадения для считывания совпадения допустимым. По умолчанию 0.7.
Usages:
app.get_image_coordinates('path/to/partial_image.png', 'path/to/full_image.png')
app.get_image_coordinates('path/to/partial_image.png', threshold=0.8)
Returns:
Union[Tuple[int, int, int, int], None]:
Кортеж с координатами наиболее вероятного совпадения (x1, y1, x2, y2)
или None, если совпадение не найдено.
Note:
При неудаче повторяет выполнение, до трёх раз.
"""
try:
coordinates = self._get_image_coordinates(full_image=full_image,
image=image,
threshold=threshold)
except Exception as error:
raise GetImageCoordinatesError(message=f"Ошибка при попытке извлечения координат изображения: {error}",
full_image=full_image,
image=image,
threshold=threshold,
original_exception=error
) from error
if coordinates is None:
raise GetImageCoordinatesError(message="Изображение не найдено",
full_image=full_image,
image=image,
threshold=threshold
)
return coordinates
def get_inner_image_coordinates(self,
                                outer_image_path: Union[bytes, np.ndarray, Image.Image, str],
                                inner_image_path: Union[bytes, np.ndarray, Image.Image, str],
                                threshold: Optional[float] = 0.9
                                ) -> Union[Tuple[int, int, int, int], None]:
    """
    Find the outer image on the screen, then locate the inner image inside it.

    Args:
        outer_image_path: Outer image (or file path) to find on screen.
        inner_image_path: Inner image (or file path) to find within the
            outer image.
        threshold: Similarity threshold for template matching. Defaults to 0.9.

    Returns:
        Coordinates of the inner image relative to the screen as
        (x1, y1, x2, y2).

    Raises:
        GetInnerImageCoordinatesError: If the search fails or the inner image
            is not found.
    """
    try:
        inner_coords = self._get_inner_image_coordinates(outer_image_path=outer_image_path,
                                                         inner_image_path=inner_image_path,
                                                         threshold=threshold)
    except Exception as error:
        raise GetInnerImageCoordinatesError(
            message=f"Ошибка при попытке извлечь внутреннее изображение: {error}",
            outer_image_path=outer_image_path,
            inner_image_path=inner_image_path,
            threshold=threshold,
            original_exception=error) from error
    if inner_coords is None:
        raise GetInnerImageCoordinatesError(message="Внутреннее изображение не найдено",
                                            outer_image_path=outer_image_path,
                                            inner_image_path=inner_image_path,
                                            threshold=threshold)
    return inner_coords
def get_many_coordinates_of_image(self,
                                  image: Union[bytes, np.ndarray, Image.Image, str],
                                  full_image: Union[bytes, np.ndarray, Image.Image, str] = None,
                                  cv_threshold: Optional[float] = 0.7,
                                  coord_threshold: Optional[int] = 5,
                                  ) -> Union[List[Tuple], None]:
    """
    Find every occurrence of a partial image inside a full image.

    Args:
        image: Partial image (or file path) to search for.
        full_image: Full image (or file path); when None, a screenshot of the
            current screen is used.
        cv_threshold: Minimum template-matching score to accept a match.
            Defaults to 0.7.
        coord_threshold: Maximum difference (pixels) between the x and y
            values of two matches for them to be considered duplicates.
            Defaults to 5.

    Returns:
        List of (x1, y1, x2, y2) tuples, one per match found.

    Raises:
        GetManyCoordinatesOfImageError: If the search fails or no match exists.
    """
    try:
        matches = self.helper.get_many_coordinates_of_image(full_image=full_image,
                                                            image=image,
                                                            cv_threshold=cv_threshold,
                                                            coord_threshold=coord_threshold)
    except Exception as error:
        raise GetManyCoordinatesOfImageError(
            message=f"Ошибка при попытке извлечения координат изображений: {error}",
            image=image,
            full_image=full_image,
            cv_threshold=cv_threshold,
            coord_threshold=coord_threshold,
            original_exception=error) from error
    if matches is None:
        raise GetManyCoordinatesOfImageError(message="Совпадения не найдены",
                                             image=image,
                                             full_image=full_image,
                                             cv_threshold=cv_threshold,
                                             coord_threshold=coord_threshold)
    return matches
def get_text_coordinates(self,
                         text: str,
                         language: Optional[str] = 'rus',
                         image: Union[bytes, str, Image.Image, np.ndarray] = None,
                         ocr: Optional[bool] = True,
                         contains: bool = True
                         ) -> Union[tuple[int, ...], tuple[int, int, int, int], None]:
    """
    Return the coordinates of the area containing *text* on the given image
    or on a screenshot.

    Works in two modes: OCR (optical character recognition) or a DOM lookup
    through get_element.

    Args:
        text: Text to search for.
        language: OCR recognition language (OCR mode only). Defaults to 'rus'.
        image: Image to search in (OCR mode only); when None, a screenshot
            is used.
        ocr: Use OCR (True) or DOM element lookup (False). Defaults to True.
        contains: Substring (True) or exact (False) text match (DOM mode only).

    Returns:
        Coordinates of the text area (x1, y1, x2, y2); with ocr=False the
        coordinates come from get_element.

    Raises:
        GetTextCoordinatesError: If the lookup fails or the text is not found.
    """
    if not ocr:
        # DOM mode: locate a visible, enabled element by its text attribute.
        try:
            return self.get_element(locator={'text': text, 'displayed': 'true', 'enabled': 'true'},
                                    contains=contains).get_coordinates()
        except Exception as error:
            # Fixed: the message used to be a triple-quoted f-string that
            # embedded a literal newline and source indentation into logs.
            raise GetTextCoordinatesError(
                message=f"Ошибка при попытке найти координаты изображения с использованием поиска по DOM: {error}",
                text=text,
                contains=contains,
                ocr=False,
                original_exception=error) from error
    try:
        coordinates = self._get_text_coordinates(text=text, language=language, image=image)
    except Exception as error:
        raise GetTextCoordinatesError(
            message=f"Ошибка при попытке найти координаты изображения с использованием OCR: {error}",
            text=text,
            language=language,
            image=image,
            ocr=True,
            original_exception=error) from error
    if coordinates is None:
        raise GetTextCoordinatesError(message="Текст не найден при использовании OCR",
                                      text=text,
                                      language=language,
                                      image=image,
                                      ocr=True)
    return coordinates
# DOM
def get_element_contains(self,
                         ) -> Any:
    """
    Return the element that contains a given element.

    Not implemented yet.

    Raises:
        NotImplementedError: Always.
    """
    raise NotImplementedError("Метод еще не реализован.")  # TODO implement
def get_elements_contains(self,
                          ) -> Any:
    """
    Return the elements that contain the given element(s).

    Not implemented yet.

    Raises:
        NotImplementedError: Always.
    """
    raise NotImplementedError("Метод еще не реализован.")  # TODO implement
# FIXME отладить, работает недостаточно стабильно в боевых условиях
def find_and_get_element(self,
                         locator: Union[Tuple[str, str], WebElement, 'WebElementExtended', Dict[str, str], str],
                         timeout: int = 10,
                         tries: int = 3,
                         contains: bool = True
                         ) -> Union[WebElementExtended, None]:
    """
    Search the page for an element; if it is not on screen, scroll every
    scrollable container and retry the given number of times.

    Args:
        locator: Element locator: a (strategy, selector) tuple such as
            ("id", "android.widget.ProgressBar"); an attribute dict such as
            {'text': 'foo', 'displayed': 'true', 'enabled': 'true'}; or a
            path to an image (its coordinates are computed and the nearest
            DOM element is used).
        timeout: Maximum wait, in seconds, for each element lookup.
            Defaults to 10.
        tries: Number of scroll-and-search attempts. Defaults to 3.
        contains: Substring (True) or exact (False) text match; only for
            dict locators with a 'text' key.

    Returns:
        The found WebElementExtended, or None if not found after all attempts.

    Raises:
        FindAndGetElementError: If a lookup fails or no scrollable element
            is present on screen.
    """
    try:
        # Fast path: the element is already visible on screen.
        if self.is_element_within_screen(locator=locator, timeout=1, contains=contains):
            try:
                return self.get_element(locator=locator, timeout_elem=timeout, contains=contains)
            except GetElementError as error:
                # Fixed: the message used to embed a literal newline and
                # source indentation; keep it single-line for readable logs.
                raise FindAndGetElementError(
                    message="Не удалось получить элемент (несмотря на то, что он обнаружен на экране)",
                    locator=locator,
                    timeout=timeout,
                    tries=tries,
                    contains=contains,
                    original_exception=error) from error
        # Slow path: scroll every scrollable container and look again.
        recyclers = self.get_elements(locator={'scrollable': 'true', 'enabled': 'true', 'displayed': 'true'})
        if recyclers is None:
            raise FindAndGetElementError(message="Не удалось обнаружить прокручиваемые элементы на экране",
                                         locator=locator,
                                         timeout=timeout,
                                         tries=tries,
                                         contains=contains)
        for _ in range(tries):
            for recycler in recyclers:
                if recycler.scroll_until_find(locator=locator, contains=contains) is not None:
                    try:
                        return self.get_element(locator=locator, timeout_elem=timeout, contains=contains)
                    except GetElementError as error:
                        raise FindAndGetElementError(message="Не удалось извлечь элемент",
                                                     locator=locator,
                                                     timeout=timeout,
                                                     tries=tries,
                                                     contains=contains,
                                                     original_exception=error) from error
        return None
    except Exception as error:
        # NOTE: FindAndGetElementError raised above is itself re-wrapped here,
        # preserving the original control flow of this method.
        raise FindAndGetElementError(message=f"Ошибка при попытке найти и извлечь элемент: {error}",
                                     locator=locator,
                                     timeout=timeout,
                                     tries=tries,
                                     contains=contains,
                                     original_exception=error) from error
def is_element_within_screen(self,
                             locator: Union[Tuple[str, str], WebElement, 'WebElementExtended', Dict[str, str], str],
                             timeout: int = 10,
                             contains: bool = True
                             ) -> bool:
    """
    Check whether the given element is within the visible screen.

    Args:
        locator: Element locator: a (strategy, selector) tuple such as
            ("id", "android.widget.ProgressBar"); an attribute dict such as
            {'text': 'foo', 'displayed': 'true', 'enabled': 'true'}; or a
            path to an image (the nearest DOM element to the image
            coordinates is used).
        timeout: Wait time for the element, in seconds. Defaults to 10.
        contains: Substring (True) or exact (False) text match; only for
            dict locators with a 'text' key.

    Returns:
        bool: True if the element is on screen, False otherwise.

    Raises:
        IsElementWithinScreenError: If the check itself fails.

    Note:
        The 'displayed' attribute is checked.
    """
    try:
        return self._is_element_within_screen(locator=locator, timeout=timeout, contains=contains)
    except Exception as error:
        # Fixed: the message used to be a triple-quoted f-string embedding a
        # literal newline and source indentation into logs.
        raise IsElementWithinScreenError(
            message=f"Ошибка при проверке, находится ли элемент на видимом экране: {error}",
            locator=locator,
            timeout=timeout,
            contains=contains,
            original_exception=error) from error
def is_text_on_screen(self,
                      text: str,
                      language: str = 'rus',
                      ocr: bool = True,
                      contains: bool = True
                      ) -> bool:
    """
    Check whether the given text is present on the screen.

    With ocr=True, recognition is performed via pytesseract; with ocr=False,
    the element is searched in the DOM.

    Args:
        text: Text to look for on screen.
        language: OCR recognition language. Defaults to 'rus'.
        ocr: Search via image recognition (True) or the DOM (False).
        contains: DOM mode only; allows a text fragment to match.

    Returns:
        bool: True if the text is found on screen, False otherwise.

    Raises:
        IsTextOnScreenError: If the check itself fails.
    """
    try:
        if ocr:
            return self.helper.is_text_on_ocr_screen(text=text, language=language)
        return self._is_element_within_screen(locator={'text': text}, contains=contains)
    except Exception as error:
        # Fixed: the message used to embed a literal newline and source
        # indentation; keep it single-line for readable logs.
        raise IsTextOnScreenError(
            message=f"Ошибка при проверке, присутствует ли заданный текст на экране: {error}",
            text=text,
            language=language,
            ocr=ocr,
            contains=contains,
            original_exception=error) from error
def is_image_on_the_screen(self,
                           image: Union[bytes, np.ndarray, Image.Image, str],
                           threshold: float = 0.9,
                           ) -> bool:
    """
    Check whether the given image is present on the screen.

    Args:
        image: Image to look for: bytes, numpy array, Image.Image, or a file
            path.
        threshold: Similarity threshold between the image and the screenshot.
            Defaults to 0.9.

    Returns:
        bool: True if the image is found on screen, False otherwise.

    Raises:
        IsImageOnScreenError: If the comparison fails (wraps OpenCV errors,
            size assertion errors, and any other exception).
    """
    try:
        return self.helper.is_image_on_the_screen(image=image, threshold=threshold)
    except Exception as error:
        # Fixed: the message used to be a triple-quoted f-string embedding a
        # literal newline and source indentation into logs.
        raise IsImageOnScreenError(
            message=f"Ошибка при проверке, присутствует ли заданное изображение на экране: {error}",
            image=image,
            threshold=threshold,
            original_exception=error) from error
def tap(self,
        locator: Union[Tuple[str, str], WebElementExtended, WebElement, Dict[str, str], str] = None,
        x: int = None,
        y: int = None,
        image: Union[bytes, np.ndarray, Image.Image, str] = None,
        duration: Optional[int] = None,
        timeout: int = 5,
        ) -> 'AppiumExtended':
    """
    Tap at coordinates, on an element, or at the centre of an image on screen.

    Args:
        locator: Element locator: a (strategy, selector) tuple, an attribute
            dict such as {'text': 'foo', 'displayed': 'true'}, or an image
            path (the nearest DOM element to the image coordinates is used).
            Used when image is None.
        x: X coordinate of the tap; used when locator is not given.
        y: Y coordinate of the tap; used when locator is not given.
        image: Image to tap (at its centre); takes precedence over the
            coordinates resolved from locator.
        duration: Tap duration in milliseconds.
        timeout: Maximum wait, in seconds, for the image to appear.

    Returns:
        AppiumExtended: self, for call chaining.

    Raises:
        TapError: If the tap fails or coordinate resolution fails.
    """
    try:
        if locator is not None:
            # Resolve the locator to a concrete point.
            x, y = self._extract_point_coordinates_by_typing(locator)
        if image is not None:
            # Poll until the image shows up or the timeout elapses, then
            # resolve its centre. This intentionally overrides locator/x/y.
            deadline = time.time() + timeout
            while not self.is_image_on_the_screen(image=image) and time.time() < deadline:
                time.sleep(1)
            x, y = self._extract_point_coordinates_by_typing(image)
        if not self._tap(x=x, y=y, duration=duration):
            raise TapError(message="Tap не удался",
                           locator=locator,
                           x=x, y=y,
                           image=image,
                           duration=duration,
                           timeout=timeout)
        return cast('AppiumExtended', self)
    except Exception as error:
        raise TapError(message=f"Ошибка при выполнении tap: {error}",
                       locator=locator,
                       x=x,
                       y=y,
                       image=image,
                       duration=duration,
                       timeout=timeout,
                       original_exception=error) from error
# SWIPE
def swipe(self,
          start_position: Union[
              Tuple[int, int], str, bytes, np.ndarray, Image.Image, WebElement, WebElementExtended, Tuple[str, str],
              Dict[str, str]],
          end_position: Optional[Union[
              Tuple[int, int], str, bytes, np.ndarray, Image.Image, WebElement, WebElementExtended, Tuple[str, str],
              Dict[str, str]]] = None,
          direction: Optional[int] = None,
          distance: Optional[int] = None,
          duration: Optional[int] = 0,
          ) -> 'AppiumExtended':
    """
    Perform a swipe (press, move, release) between two positions.

    Args:
        start_position: Start of the swipe. Accepted forms:
            - (int, int) tuple: raw (x, y) coordinates;
            - (str, str) tuple: an element locator, e.g. ('id', 'elementId');
            - dict: an attribute locator, e.g. {'text': 'some text'};
            - WebElement / WebElementExtended: the element's position is used;
            - str / bytes / np.ndarray / Image.Image: an image; the centre of
              its on-screen match is used.
        end_position: End of the swipe; same forms as start_position.
        direction: Swipe direction in degrees (used when end_position is None).
        distance: Swipe distance in pixels (used when end_position is None).
        duration: Swipe duration in milliseconds. Defaults to 0.

    Returns:
        AppiumExtended: self, for call chaining.

    Raises:
        SwipeError: If coordinate resolution or the swipe itself fails.

    Notes:
        Either end_position or the direction/distance pair must be supplied.
        A str is treated as an image path, not as an element locator.
    """
    try:
        # Resolve the start point from whatever form was supplied.
        start_x, start_y = self._extract_point_coordinates_by_typing(start_position)
        if end_position is not None:
            # Resolve the end point the same way.
            end_x, end_y = self._extract_point_coordinates_by_typing(end_position)
        else:
            # Derive the end point from direction/distance relative to start.
            end_x, end_y = self._extract_point_coordinates_by_direction(
                direction, distance, start_x, start_y,
                screen_resolution=self.terminal.get_screen_resolution())
        # Perform the swipe.
        if not self._swipe(start_x=start_x, start_y=start_y,
                           end_x=end_x, end_y=end_y,
                           duration=duration):
            # Fixed: `direction` used to be reported as `duration`'s value
            # in both raise sites; also dropped a placeholder-less f-string.
            raise SwipeError(message="Не удалось выполнить свайп",
                             start_position=start_position,
                             end_position=end_position,
                             direction=direction,
                             distance=distance,
                             duration=duration)
        # Return the AppiumExtended instance for chaining.
        return cast('AppiumExtended', self)
    except Exception as error:
        raise SwipeError(message=f"Ошибка при попытке выполнения свайпа: {error}",
                         start_position=start_position,
                         end_position=end_position,
                         direction=direction,
                         distance=distance,
                         duration=duration,
                         original_exception=error) from error
def swipe_right_to_left(self) -> 'AppiumExtended':
    """
    Swipe from right to left along the horizontal axis of the screen.

    The gesture starts at 90% of the screen width and ends at 10% of it,
    keeping the vertical coordinate at 50% of the screen height.

    Returns:
        AppiumExtended: self, for call chaining.
    """
    screen = self.terminal.get_screen_resolution()
    width, height = screen[0], screen[1]
    mid_y = height // 2
    self.swipe(start_position=(int(width * 0.9), mid_y),
               end_position=(int(width * 0.1), mid_y))
    # Return self so calls can be chained.
    return cast('AppiumExtended', self)
def swipe_left_to_right(self) -> 'AppiumExtended':
    """
    Swipe from left to right along the horizontal axis of the screen.

    The gesture starts at 10% of the screen width and ends at 90% of it,
    keeping the vertical coordinate at 50% of the screen height.

    Returns:
        AppiumExtended: self, for call chaining.
    """
    screen = self.terminal.get_screen_resolution()
    width, height = screen[0], screen[1]
    mid_y = height // 2
    self.swipe(start_position=(int(width * 0.1), mid_y),
               end_position=(int(width * 0.9), mid_y))
    # Return self so calls can be chained.
    return cast('AppiumExtended', self)
def swipe_top_to_bottom(self) -> 'AppiumExtended':
    """
    Swipe from top to bottom along the vertical axis of the screen.

    The gesture starts at 10% of the screen height and ends at 90% of it,
    keeping the horizontal coordinate at 50% of the screen width.

    Returns:
        AppiumExtended: self, for call chaining.
    """
    window_size = self.terminal.get_screen_resolution()
    width = window_size[0]
    height = window_size[1]
    top = int(height * 0.1)
    bottom = int(height * 0.9)
    # Fixed: the previous implementation passed the vertical offsets as
    # x-coordinates (and height//2 as y), producing a horizontal gesture.
    # A top-to-bottom swipe varies y at the horizontal centre of the screen.
    self.swipe(start_position=(width // 2, top),
               end_position=(width // 2, bottom))
    # Return self so calls can be chained.
    return cast('AppiumExtended', self)
def swipe_bottom_to_top(self) -> 'AppiumExtended':
    """
    Swipe from bottom to top along the vertical axis of the screen.

    The gesture starts at 90% of the screen height and ends at 10% of it,
    keeping the horizontal coordinate at 50% of the screen width.

    Returns:
        AppiumExtended: self, for call chaining.
    """
    window_size = self.terminal.get_screen_resolution()
    width = window_size[0]
    height = window_size[1]
    top = int(height * 0.1)
    bottom = int(height * 0.9)
    # Fixed: the previous implementation passed the vertical offsets as
    # x-coordinates (and height//2 as y), producing a horizontal gesture.
    # A bottom-to-top swipe varies y at the horizontal centre of the screen.
    self.swipe(start_position=(width // 2, bottom),
               end_position=(width // 2, top))
    # Return self so calls can be chained.
    return cast('AppiumExtended', self)
# WAIT
def wait_for(self,
             locator: Union[Tuple[str, str], WebElement, 'WebElementExtended', Dict[str, str], str,
             List[Tuple[str, str]], List[WebElement], List['WebElementExtended'], List[Dict[str, str]], List[
                 str]] = None,
             image: Union[bytes, np.ndarray, Image.Image, str,
             List[bytes], List[np.ndarray], List[Image.Image], List[str]] = None,
             timeout: int = 10,
             contains: bool = True,
             full_image: Union[bytes, np.ndarray, Image.Image, str] = None,
             ) -> 'AppiumExtended':
    """
    Wait until the given locator(s) or image(s) appear on the screen.

    Args:
        locator: A (strategy, selector) tuple, an attribute dict, an image
            path, or a list of any of these (all list entries are awaited).
        image: Image to wait for: bytes, numpy array, Image.Image, a file
            path, or a list of these.
        timeout: Maximum wait in seconds. Defaults to 10.
        contains: Substring (True) or exact (False) text match; locator
            searches only.
        full_image: NOTE(review): accepted for interface compatibility but
            not forwarded to _wait_for — confirm whether it should be.

    Returns:
        AppiumExtended: self, for call chaining.

    Raises:
        WaitForError: If nothing appeared within the timeout or the wait
            itself failed.
    """
    try:
        if not self._wait_for(locator=locator, image=image, timeout=timeout, contains=contains):
            raise WaitForError(message="Элемент или изображение не появились на экране в течение заданного времени",
                               locator=locator,
                               image=image,
                               timeout=timeout,
                               contains=contains)
        return cast('AppiumExtended', self)
    except Exception as error:
        # Fixed: the message used to be a triple-quoted f-string embedding a
        # literal newline and source indentation into logs.
        raise WaitForError(
            message=f"Ошибка ожидания элемента или изображения на экране в течение заданного времени {error}",
            locator=locator,
            image=image,
            timeout=timeout,
            contains=contains,
            original_exception=error) from error
def wait_for_not(self,
                 locator: Union[Tuple[str, str], WebElement, 'WebElementExtended', Dict[str, str], str,
                 List[Tuple[str, str]], List[WebElement], List['WebElementExtended'], List[Dict[str, str]], List[
                     str]] = None,
                 image: Union[bytes, np.ndarray, Image.Image, str,
                 List[bytes], List[np.ndarray], List[Image.Image], List[str]] = None,
                 timeout: int = 10,
                 contains: bool = True,
                 ) -> 'AppiumExtended':
    """
    Wait until the given locator(s) or image(s) disappear from the screen.

    Args:
        locator: A (strategy, selector) tuple, an attribute dict, an image
            path, or a list of any of these (all list entries are awaited).
        image: Image expected to vanish: bytes, numpy array, Image.Image,
            a file path, or a list of these.
        timeout: Maximum wait in seconds. Defaults to 10.
        contains: Substring (True) or exact (False) text match; locator
            searches only.

    Returns:
        AppiumExtended: self, for call chaining.

    Raises:
        WaitForNotError: If something is still present after the timeout or
            the wait itself failed.
    """
    try:
        gone = self._wait_for_not(locator=locator, image=image, timeout=timeout, contains=contains)
        if not gone:
            raise WaitForNotError(message="Элемент или изображение не исчезли в течение заданного времени",
                                  locator=locator,
                                  image=image,
                                  timeout=timeout,
                                  contains=contains)
        return cast('AppiumExtended', self)
    except Exception as error:
        raise WaitForNotError(message=f"Ошибка при ожидании wait_for_not(): {error}",
                              locator=locator,
                              image=image,
                              timeout=timeout,
                              contains=contains,
                              original_exception=error) from error
def wait_return_true(self, method, timeout: int = 10) -> 'AppiumExtended':
    """
    Wait until *method* returns True, up to *timeout* seconds.

    Args:
        method: Callable polled by the internal waiter.
            (Assumed zero-argument from the name — confirm against
            _wait_return_true.)
        timeout: Maximum wait in seconds. Defaults to 10.

    Returns:
        AppiumExtended: self, for call chaining.

    Raises:
        WaitReturnTrueError: If the wait fails.
    """
    try:
        self._wait_return_true(method=method, timeout=timeout)
    except Exception as error:
        raise WaitReturnTrueError(message=f"Ошибка ожидания возврата True от метода: {error}",
                                  method=method,
                                  timeout=timeout,
                                  original_exception=error) from error
    return cast('AppiumExtended', self)
# OTHER
def draw_by_coordinates(self,
                        image: Union[bytes, str, Image.Image, np.ndarray] = None,
                        coordinates: Tuple[int, int, int, int] = None,
                        top_left: Tuple[int, int] = None,
                        bottom_right: Tuple[int, int] = None,
                        path: str = None,
                        ) -> 'AppiumExtended':
    """
    Draw a rectangle on the provided image or on a driver screenshot.

    Args:
        image: Image to draw on; when None, a screenshot is used.
        coordinates: Rectangle as (x1, y1, x2, y2).
        top_left: Top-left corner of the rectangle.
        bottom_right: Bottom-right corner of the rectangle.
        path: Where to save the resulting image.

    Returns:
        AppiumExtended: self, for call chaining.

    Raises:
        DrawByCoordinatesError: If drawing fails or the helper reports
            failure.

    Notes:
        - When top_left/bottom_right are not given, `coordinates` is used.
    """
    try:
        # Fixed: the helper result was previously checked with `assert`,
        # which is silently stripped under `python -O`; use an explicit check.
        drawn = self.helper.draw_by_coordinates(image=image,
                                                coordinates=coordinates,
                                                top_left=top_left,
                                                bottom_right=bottom_right,
                                                path=path)
    except Exception as error:
        raise DrawByCoordinatesError(message=f"Не удалось нарисовать прямоугольник на изображении: {error}",
                                     coordinates=coordinates,
                                     top_left=top_left,
                                     bottom_right=bottom_right,
                                     path=path,
                                     original_exception=error) from error
    if not drawn:
        raise DrawByCoordinatesError(message="Не удалось нарисовать прямоугольник на изображении",
                                     coordinates=coordinates,
                                     top_left=top_left,
                                     bottom_right=bottom_right,
                                     path=path)
    return cast('AppiumExtended', self)
def save_screenshot(self, path: str = '', filename: str = 'screenshot.png') -> 'AppiumExtended':
    """
    Save a screenshot of the screen to the given file.

    Args:
        path: Directory to save into; empty string means the current
            directory.
        filename: File name for the screenshot. Defaults to 'screenshot.png'.

    Returns:
        AppiumExtended: self, for call chaining.

    Raises:
        SaveScreenshotError: If saving fails or the helper reports failure.
    """
    try:
        # Fixed: the helper result was previously checked with `assert`,
        # which is silently stripped under `python -O`; use an explicit check.
        saved = self.helper.save_screenshot(path=path, filename=filename)
    except Exception as error:
        raise SaveScreenshotError(message=f"Не удалось сохранить скриншот: {error}",
                                  path=path,
                                  filename=filename,
                                  original_exception=error) from error
    if not saved:
        raise SaveScreenshotError(message="Не удалось сохранить скриншот",
                                  path=path,
                                  filename=filename)
    return cast('AppiumExtended', self)
# PRIVATE
def _extract_point_coordinates_by_typing(self,
                                         position:
                                         Union[Tuple[int, int], str, bytes, np.ndarray, Image.Image,
                                               Tuple[str, str], Dict, WebElement, WebElementExtended]
                                         ) -> Tuple[int, int]:
    """
    Resolve *position* to a concrete (x, y) point based on its type.

    Args:
        position: One of:
            - (int, int) tuple: used as-is;
            - (str, str) tuple / dict / WebElement / WebElementExtended:
              treated as an element locator; the element centre is used;
            - str / bytes / np.ndarray / Image.Image: treated as an image;
              the centre of its on-screen match is used.

    Returns:
        Tuple[int, int]: The resolved point as (x, y). NOTE(review): an
        unrecognized type silently yields (0, 0) — kept for compatibility.

    Raises:
        ExtractPointCoordinatesByTypingError: If resolution fails.
    """
    try:
        x, y = 0, 0
        # Fixed: isinstance() against typing.Tuple / typing.Dict is
        # deprecated; the builtin tuple / dict types are equivalent here.
        if (isinstance(position, tuple) and
                isinstance(position[0], int) and
                isinstance(position[1], int)):
            # Already a coordinate pair.
            x, y = position
        elif (isinstance(position, tuple) and
              isinstance(position[0], str) and
              isinstance(position[1], str)) or \
                isinstance(position, (WebElement, WebElementExtended, dict)):
            # A locator: use the centre of the matching element.
            x, y = utils.calculate_center_of_coordinates(
                self.get_element(locator=position).get_coordinates())
        elif isinstance(position, (bytes, np.ndarray, Image.Image, str)):
            # An image: use the centre of the best on-screen match.
            x, y = utils.calculate_center_of_coordinates(
                self.get_image_coordinates(image=position))
        return x, y
    except Exception as error:
        raise ExtractPointCoordinatesByTypingError(
            message=f"Не удалось извлечь координаты точки на основе типа переданной позиции: {error}",
            position=position,
            original_exception=error) from error
@staticmethod
def _extract_point_coordinates_by_direction(direction: int, distance: int,
start_x: int, start_y: int,
screen_resolution: tuple
) -> Tuple[int, int]:
"""
Извлекает координаты точки на заданном расстоянии и в заданном направлении относительно начальных координат.
Параметры:
direction (str): Направление движения в пределах 360 градусов.
distance (int): Расстояние, на которое нужно переместиться относительно начальных координат в пикселях.
start_x (int): Начальная координата X.
start_y (int): Начальная координата Y.
Возвращает:
Tuple[int, int]: Координаты конечной точки в формате (x, y).
"""
try:
width = screen_resolution[0]
height = screen_resolution[1]
end_x, end_y = utils.find_coordinates_by_vector(width=width, height=height,
direction=direction, distance=distance,
start_x=start_x, start_y=start_y)
return end_x, end_y
except Exception as error:
raise ExtractPointCoordinatesError(message=f"Не удалось извлечь координаты точки: {error}",
direction=direction,
distance=distance,
start_x=start_x,
start_y=start_y,
screen_resolution=screen_resolution,
original_exception=error) from error
    def get_screenshot_as_base64_decoded(self) -> bytes:
        """
        Take a screenshot of the screen and return it as decoded bytes.

        Usages:
            screenshot_bytes = app.get_screenshot_as_base64_decoded()

        Returns:
            bytes: Decoded screenshot bytes (PNG format).

        Raises:
            GetScreenshotError: If the screenshot could not be captured.

        Notes:
            - Thin public wrapper: all work is delegated to the private
              ``_get_screenshot_as_base64_decoded()`` helper.
            - The raw screenshot arrives Base64-encoded and is decoded back
              into bytes by the helper.
        """
        try:
            return self._get_screenshot_as_base64_decoded()
        except Exception as error:
            raise GetScreenshotError(message=f"Не удалось получить скриншот: {error}",
                                     original_exception=error) from error
def save_source(self, path: str = '', filename: str = 'source.xml'):
"""
Сохраняет исходный код страницы в указанной директории с указанным именем файла.
Args:
path (str, optional): Путь к директории, в которой будет сохранен файл. По умолчанию пустая строка, что означает текущую директорию.
filename (str, optional): Имя файла, в котором будет сохранен исходный код. По умолчанию 'source.xml'.
Usages:
save_source()
save_source(path='some/directory')
save_source(filename='another_name.xml')
save_source(path='some/directory', filename='another_name.xml')
Returns:
bool: True, если исходный код успешно сохранен. False, если произошла ошибка.
Notes:
- Метод использует встроенный метод драйвера `page_source` для получения исходного кода страницы.
- Исходный код сохраняется в формате XML.
"""
try:
source = self.driver.page_source
path_to_file = os.path.join(path, filename)
with open(path_to_file, "wb") as f:
f.write(source.encode('utf-8'))
except Exception as error:
raise SaveSourceError(message="Не удалось сохранить исходный код страницы",
path=path,
filename=filename,
original_exception=error) from error
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended/appium_extended.py
|
appium_extended.py
|
import logging
import json
import time
from appium import webdriver
from appium_extended_helpers.appium_helpers import AppiumHelpers
from appium_extended_server.appium_server import AppiumServer
from appium_extended_terminal.terminal import Terminal
from appium_extended_terminal.aapt import Aapt
from appium_extended_terminal.adb import Adb
from appium.webdriver.webdriver import WebDriver
class AppiumBase:
    """
    Base class for working with Appium.

    Owns the Appium server handle and the WebDriver session and provides the
    connect / disconnect lifecycle for the device.
    """

    def __init__(self, logger: logging.Logger = None):
        """
        Args:
            logger: Logger to use. When None, a module-level logger is created
                so attribute access below cannot fail.
        """
        # Connection parameters; populated by connect().
        self.server_log_level: str = None
        self.server_port: int = None
        self.server_ip: str = None
        self.server: AppiumServer = None
        # Bug fix: the original stored a possibly-None logger and then
        # dereferenced self.logger.level below, which crashed with the
        # default argument. Fall back to a named logger instead.
        self.logger = logger if logger is not None else logging.getLogger(__name__)
        self.driver: WebDriver = None
        self.terminal: Terminal = None
        self.session_id: str = None
        self.helper: AppiumHelpers = None
        self.keep_alive_server: bool = True
        self.aapt = Aapt()
        self.adb = Adb()
        # Keep the tool loggers at the same verbosity as our own logger.
        aapt_logger = logging.getLogger('aapt')
        aapt_logger.setLevel(self.logger.level)
        adb_logger = logging.getLogger('adb')
        adb_logger.setLevel(self.logger.level)

    def connect(self,
                capabilities: dict,
                server_ip: str = '127.0.0.1',
                server_port: int = 4723,
                server_log_level: str = 'error',
                remote: bool = False,
                keep_alive_server: bool = True) -> None:
        """
        Connect to a device through an Appium server.

        Args:
            capabilities (dict): Desired capabilities for the device session.
            server_ip (str, optional): Appium server IP. Defaults to '127.0.0.1'.
            server_port (int, optional): Appium server port. Defaults to 4723.
            server_log_level (str, optional): Server log level. Defaults to 'error'.
            remote (bool, optional): True when the server runs on a remote host;
                no local server is started in that case. Defaults to False.
            keep_alive_server (bool, optional): Keep the server running after
                disconnect() (only meaningful with remote=False). Defaults to True.

        Raises:
            AppiumServerNotAliveException: If the Appium server does not respond.
            WebDriverException: If the WebDriver connection cannot be established.

        Returns:
            None. Initializes the driver and the driver-dependent helpers.
        """
        self.server_ip = server_ip
        self.server_port = server_port
        self.server_log_level = server_log_level
        self.keep_alive_server = keep_alive_server
        self.server = AppiumServer(server_ip=self.server_ip,
                                   server_port=self.server_port,
                                   remote_log_level=self.server_log_level,
                                   logger=self.logger)
        self.logger.debug(
            f"connect(capabilities {capabilities}")
        if not remote:
            # Start a local Appium server if one is not already running.
            if not self.server.is_alive():
                self.server.start()
                time.sleep(10)
                self.server.wait_until_alive()
        url = f'http://{server_ip}:{str(server_port)}/wd/hub'
        self.logger.info(f"Подключение к серверу: {url}")
        self.driver = webdriver.Remote(command_executor=url,
                                       desired_capabilities=capabilities,
                                       keep_alive=True)
        self.session_id = self.driver.session_id
        # Initialize the objects that require a live driver.
        self.terminal = Terminal(driver=self.driver, logger=self.logger)
        self.helper = AppiumHelpers(driver=self.driver, logger=self.logger)
        app_capabilities = json.dumps(capabilities)
        self.logger.info(f'Подключение установлено с параметрами: {str(app_capabilities)}, {url}')
        self.logger.info(f'Сессия №: {self.driver.session_id}')

    def disconnect(self) -> None:
        """
        Disconnect from the device; additionally stop the Appium server when
        ``keep_alive_server`` is False.

        Usages:
            app.disconnect()

        Returns:
            None.
        """
        if self.driver:
            self.logger.debug(f"Отключение от сессии №: {self.driver.session_id}")
            self.driver.quit()
            self.driver = None
        if not self.keep_alive_server:
            self.server.stop()

    def is_running(self) -> bool:
        """
        Report whether the Appium server and the current session are active.

        Returns:
            bool: True when server and session are alive, otherwise False.
        """
        # NOTE(review): selenium's WebDriver does not document is_running();
        # confirm the installed Appium client actually provides this method.
        return self.driver.is_running()
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended/appium_base.py
|
appium_base.py
|
import logging
from typing import Union, Dict, Tuple
from appium.webdriver import WebElement
from appium_extended.appium_get import AppiumGet
class AppiumIs(AppiumGet):
    """
    Appium extension class providing element state checks.
    """

    def __init__(self, logger: logging.Logger):
        super().__init__(logger=logger)

    def _is_element_within_screen(
            self,
            locator: Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str],
            timeout: int = 10,
            contains: bool = True
    ) -> bool:
        """
        Check whether the given element is displayed and lies fully inside
        the visible screen area.

        Args:
            locator (Union[Tuple, WebElement, 'WebElementExtended', Dict[str, str], str]):
                Element locator:
                Tuple - two strings, search strategy plus selector,
                e.g. ("id", "android.widget.ProgressBar").
                Dict - attribute/value pairs of the wanted element,
                e.g. {'text': 'foo', 'displayed' : 'true', 'enabled': 'true'}.
                str - path to an image.
            timeout (int): Element wait timeout, seconds. Default: 10.
            contains (bool): Exact match vs. substring match; only used for
                dictionary lookups with a 'text' key.

        Returns:
            bool: True when the element is on screen, False otherwise.

        Note:
            Relies on the element's 'displayed' attribute.
        """
        # Screen dimensions from the terminal helper.
        resolution = self.terminal.get_screen_resolution()
        screen_width = resolution[0]
        screen_height = resolution[1]

        target = self._get_element(locator=locator, timeout_elem=timeout, contains=contains)
        if target is None:
            return False

        # The element must report itself as displayed.
        if target.get_attribute('displayed') != 'true':
            return False

        location = target.location
        size = target.size
        left = location['x']
        top = location['y']
        right = left + size['width']
        bottom = top + size['height']

        # On screen only when no edge sticks out past a screen boundary.
        return (left >= 0 and top >= 0 and
                right <= screen_width and bottom <= screen_height)
|
AppiumExtended
|
/AppiumExtended-0.5.49b0-py3-none-any.whl/appium_extended/appium_is.py
|
appium_is.py
|
from xlrd import open_workbook
from appiumrunner.step_model import StepModel as model
class ExcelReader():
    """Loads test cases from an Excel workbook.

    Every sheet except ``data`` describes the steps of one test case (one row
    per step); the ``data`` sheet maps case names to example payloads.
    """

    @staticmethod
    def read_excel(excel_path):
        """Read the workbook at *excel_path* and return a list of case dicts
        of the form {name, desc, examples, steps}."""
        workbook = open_workbook(excel_path)
        sheet_names = workbook.sheet_names()

        # 1. Collect the steps per case sheet: {"login": [step1, step2]}
        steps_by_case = {}
        for sheet_name in sheet_names:
            if sheet_name == 'data':
                continue
            steps_by_case[sheet_name] = []
            sheet = workbook.sheet_by_name(sheet_name)
            for row_idx in range(sheet.nrows):
                # Row 0 is the header - skip it.
                if row_idx == 0:
                    continue
                # One row of cell values describes a single step.
                row_values = [sheet.cell(row_idx, col_idx).value
                              for col_idx in range(sheet.ncols)]
                step = model()
                step.sort = row_values[0]
                step.desc = row_values[1]
                step.action = row_values[2]
                step.searchType = row_values[3]
                step.searchvalue = row_values[4]
                step.searchindex = row_values[5]
                step.validateSource = row_values[6]
                step.validateAttr = row_values[7]
                step.validateType = row_values[8]
                step.validateData = row_values[9]
                steps_by_case[sheet_name].append(step)

        # 2. Collect the example data per case: {"login": [data1, data2]}
        examples_by_case = {}
        data_sheet = workbook.sheet_by_name("data")
        for row_idx in range(data_sheet.nrows):
            case_name = data_sheet.cell(row_idx, 0).value
            examples_by_case[case_name] = []
            for col_idx in range(data_sheet.ncols):
                cell_text = data_sheet.cell(row_idx, col_idx).value.strip()
                if (col_idx == 0) or (cell_text == ""):
                    continue
                # SECURITY: eval() executes arbitrary code taken from the
                # spreadsheet - only process trusted workbooks.
                examples_by_case[case_name].append(eval(cell_text))

        # 3. Reshape into [{name, desc, examples, steps}]
        cases = []
        for case_name in list(steps_by_case.keys()):
            if examples_by_case[case_name]:
                for index, example in enumerate(examples_by_case[case_name]):
                    cases.append({
                        "name": case_name,
                        "steps": steps_by_case[case_name],
                        "examples": example,
                        "desc": "{}_{}".format(case_name, index)
                    })
            else:
                cases.append({
                    "name": case_name,
                    "steps": steps_by_case[case_name],
                    "examples": {},
                    "desc": "{}_0".format(case_name)
                })
        return cases
|
AppiumRunner
|
/AppiumRunner-0.0.1-py3-none-any.whl/appiumrunner/excel_reader.py
|
excel_reader.py
|
import operator
import time
import uuid
from selenium.common.exceptions import NoSuchElementException
def execute(driver, steps, data):
    """
    Execute one test case.

    :param driver: Appium/Selenium driver instance.
    :param steps: ordered list of step objects (see StepModel).
    :param data: dict of example data; steps reference it via validateData keys.
    :return: None
    """
    for step in steps:
        time.sleep(1)
        # Actions that do not require locating an element first.
        if step.action == 'screenshot':
            filename = uuid.uuid1().hex
            r = driver.get_screenshot_as_file(
                'static/screenshot/' + filename + '.png')  # take a screenshot
        elif step.action == 'wait':  # fixed wait
            time_value = data[step.validateData]
            time.sleep(int(time_value))
            # WebDriverWait(driver, 10, 0.5).until(EC.visibility_of(element))
            # driver.implicitly_wait(int(step.validateData))
        elif step.action == 'end':  # end of the step sequence: restart the app
            driver.close_app()
            time.sleep(1)
            driver.launch_app()
            continue
        # Locate the element.
        element = None
        try:
            if step.searchType == 'find_elements_by_id':
                # NOTE(review): reads step.searchIndex, but the Excel reader
                # populates step.searchindex (all lowercase) - confirm which
                # attribute StepModel actually defines.
                element = getattr(driver, step.searchType)(
                    step.searchvalue)[step.searchIndex]
            elif step.action == 'wait':
                continue
            else:
                element = getattr(driver, step.searchType)(
                    step.searchvalue)
        except NoSuchElementException:
            print("找不到对应的元素,定位方式为:{},定位值为:{}".format(step.searchType, step.searchvalue))
        assert not element is None, "元素没有定位到,不能为空!"
        # Perform the action.
        if step.action == 'assert':  # assertion step
            value = None
            if step.validateSource == 'normal':
                value = element.text  # use the element's text content
            else:
                value = getattr(element, 'get_attribute')(step.validateAttr)
            assertResult = True  # assertion outcome
            if step.validateType == 'contains':  # substring match
                assertResult = value.__contains__(
                    step.validateData)
            elif step.validateType == 'equals':  # exact match
                assertResult = value == step.validateData
            else:
                # Fall back to an operator-module comparison (e.g. 'gt', 'le')
                # on the numeric values.
                assertResult = getattr(operator, step.validateType)(
                    float(value), float(step.validateData))
            assert assertResult,"断言不通过"
        elif step.action == 'send_keys':  # type text into the element
            keys_value = data[step.validateData]
            getattr(element, step.action)(keys_value)
        else:
            getattr(element, step.action)()
|
AppiumRunner
|
/AppiumRunner-0.0.1-py3-none-any.whl/appiumrunner/executor.py
|
executor.py
|
Everything other than specific items mentioned below is:
Copyright (c) 2012, 2013 TortoiseLabs LLC
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
This software is provided 'as is' and without any warranty, express or
implied. In no event shall the authors be liable for any damages arising
from the use of this software.
The file "appliancekit/axml.py" is under the following:
Copyright (c) 2008 SystemInPlace (parser)
Copyright (c) 2012, 2013 TortoiseLabs LLC
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
This software is provided 'as is' and without any warranty, express or
implied. In no event shall the authors be liable for any damages arising
from the use of this software.
|
ApplianceKit-NG
|
/ApplianceKit-NG-0.6.2.tar.gz/ApplianceKit-NG-0.6.2/COPYING.md
|
COPYING.md
|
# appliancekit-ng
Copyright (c) 2012, 2013 TortoiseLabs, LLC.
This software is free but copyrighted, see COPYING.md for more details.
## motive
ApplianceKit has become unmaintainable. In addition, it depends on XML and exposes too many
implementation details in the Python-based core.
By using an intermediate representation between the XML and what actually happens, it is
possible to:
* Implement all logic for bringing up a distribution as data, by using a stack machine
to interpret the data.
* Add new distributions by writing specfiles for them instead of entirely new classes of
monolithic code in Python.
* Eventually transition away entirely from using AXML.
## intermediate representation
Most of what the ApplianceKit NG core does is:
* Translate XML into IR, for example an XML file might be translated into this high-level IR,
which will get compiled into lower-level IR.
```
{% extends "debian-6.spec" %}
{% set packages=['irssi'] %}
```
* Translate high-level IR into low-level IR using translation rules as described in the base
specfiles. You can use ak-compile or ak-compile-xml to view what the lowlevel IR parsetree
looks like.
* Compile a parse tree into bytecode and then run the bytecode to create the appliance
filesystem.
For more information on the IR language, see [ADL.md](ADL.md).
## requirements
* For Alpine: `apk-tools`.
* For Debian or Ubuntu: `debootstrap`.
* For CentOS, ScientificLinux, RHEL, openSUSE: `rinse`.
|
ApplianceKit-NG
|
/ApplianceKit-NG-0.6.2.tar.gz/ApplianceKit-NG-0.6.2/README.md
|
README.md
|
# Appliance Definition Language specification
ADL is an abstract parse-tree expressed in JSON. Each node in the tree has a mandatory
`operation` property. All other fields in the nodes are sub-properties.
A tree node looks like this:
```
{"operation": "runcmd", "chroot": true, "command": ["/bin/true"]}
```
We use [Jinja2](http://jinja.pocoo.org) as a preprocessor. This provides a useful macro
language, which we have built a framework around. The rest of this document concerns the
macro system and operations.
`base.spec` defines a root of an ADL parse tree, as well as some hooks for various phases
which may be reimplemented downstream by users of `base.spec`. Almost all ADL files should
derive from `base.spec`, which is declared by doing the following:
```
{% extends "base.spec" %}
```
## Operations
There are various operations which are implemented by the interpreter. They are:
* **runcmd**: Run a command inside or outside of a chroot. Takes two parameters, **chroot**
which is a boolean, and **command** which contains the arguments and command name.
* **render_template**: Renders a template and installs it to a location in the guest filesystem.
* **noop**: Skips this node in the parse tree.
## Phases
There are various phases which are implemented by `base.spec`. This allows for ADL files
built on top of the framework provided by `base.spec` to weave only certain parse tree
nodes into the final parse tree representation based on what the end-user wants to do.
These phases are encapsulated inside `{% block %}` constructs.
The phases are:
* **bootstrap**: Commands to get an initial system installed. Things like `debootstrap` and
`pacstrap`. Perhaps ugly hacks involving rpm2cpio, but we will probably ship a helper script
for that.
* **packages**: Commands to install user specified packages or otherwise optional packages that
are not needed in the system when it's done with the **bootstrap** phase, but would be needed
to bring the system up on a hypervisor or under bare metal.
* **configuration**: Commands to set up the configuration of the appliance based on the `config`
object.
* **custom**: Any special commands that the specfile may wish to provide. Also could have additional
phases here.
* **cleanup**: Cleans up any changes done to the guest filesystem during **bootstrap**.
* **xentweaks**: Tweaks some config files for running under Xen, such as `/etc/inittab`.
|
ApplianceKit-NG
|
/ApplianceKit-NG-0.6.2.tar.gz/ApplianceKit-NG-0.6.2/ADL.md
|
ADL.md
|
from appliancekit import env
import os
import subprocess
import errno
import inspect
def mkdir_p(path):
    """Create *path* and any missing parents (like ``mkdir -p``).

    Race-free: unlike the previous exists()-then-makedirs() check, a
    concurrent creation of the same directory is tolerated instead of
    raising.
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # The directory already exists: fine. Anything else is a real error.
        if exc.errno != errno.EEXIST:
            raise
class UnimplementedOperationException(Exception):
    """Raised when a parse-tree node has no implementation for its operation."""
    pass
class UnimplementedOperation(object):
    """Base node for compiled parse-tree operations.

    Stores the operation name and every extra keyword argument as an
    attribute. Subclasses override visit() to implement behaviour; the base
    visit() raises to signal a missing implementation.
    """

    def __init__(self, operation, **kwargs):
        self.operation = operation
        for attr_name in kwargs:
            setattr(self, attr_name, kwargs[attr_name])

    def __repr__(self):
        return "<{}: '{}'>".format(type(self).__name__, self.operation)

    def visit(self, state):
        raise UnimplementedOperationException(self.operation)

    def property_dict(self):
        # All non-callable members, i.e. the data attributes of this node.
        members = inspect.getmembers(self, lambda member: not inspect.isroutine(member))
        return dict(members)
class NoOpOperation(UnimplementedOperation):
    '''A parse-tree node that deliberately does nothing when visited.'''

    def visit(self, state):
        return None
class SetEnvOperation(UnimplementedOperation):
    '''Sets (or, when value is falsy, removes) an environment variable.'''

    def __repr__(self):
        return "<{}: '{}' = '{}'>".format(type(self).__name__, self.key, self.value)

    def visit(self, state):
        # In simulate mode only log what would happen; do not touch os.environ.
        simulate = state.get('simulate', False)
        print 'setenv', '{}={}'.format(self.key, self.value)
        if simulate:
            return
        if self.value:
            os.environ[self.key] = self.value
        elif os.environ.has_key(self.key):
            # Falsy value means "unset the variable" (if currently set).
            del os.environ[self.key]
class RunCmdOperation(UnimplementedOperation):
    '''Runs a command, optionally inside the chroot named in state['chroot'].'''

    def __repr__(self):
        return "<{}: '{}'{}>".format(type(self).__name__, self.command[0], " (chroot)" if self.chroot else "")

    def visit(self, state):
        # In simulate mode only print the command line; do not execute it.
        simulate = state.get('simulate', False)
        cmdline = list()
        if self.chroot:
            cmdline += ['chroot', state['chroot']]
        for i in self.command:
            st = i
            # Substitute '$key' placeholders with values from the state dict.
            for k, v in state.iteritems():
                st = st.replace('$' + str(k), str(v))
            cmdline.append(st)
        print ' '.join(cmdline)
        if simulate:
            return
        return subprocess.call(cmdline, close_fds=True)
class RenderTemplateOperation(UnimplementedOperation):
    '''Renders a Jinja2 template into a path inside the guest filesystem.'''

    def __repr__(self):
        return "<{}: '{}'>".format(type(self).__name__, self.template)

    def visit(self, state):
        # In simulate mode only print what would be rendered.
        simulate = state.get('simulate', False)
        print 'RenderTemplate', '{} -> {}{}'.format(self.template, state['chroot'], self.target)
        if simulate:
            return
        # NOTE(review): 'vars' aliases (not copies) the shared state dict, so
        # update() permanently injects this node's attributes into state for
        # all later operations - confirm that is intended.
        vars = state
        vars.update(self.property_dict())
        tmpl = env.get_template(self.template)
        # Target path is resolved relative to the chroot.
        target = state['chroot'] + self.target
        mkdir_p(os.path.dirname(target))
        target_fd = open(target, 'w')
        target_fd.write(tmpl.render(**vars))
        target_fd.close()
class MkdirParentsOperation(UnimplementedOperation):
    '''Creates a directory (with parents) inside the guest filesystem.'''

    def __repr__(self):
        return "<{}: '{}'>".format(type(self).__name__, self.path)

    def visit(self, state):
        # In simulate mode only print the directory that would be created.
        simulate = state.get('simulate', False)
        print 'MkdirParents', '{}{}'.format(state['chroot'], self.path)
        if simulate:
            return
        # Path is resolved relative to the chroot.
        target = state['chroot'] + self.path
        mkdir_p(target)
class PostBackOperation(UnimplementedOperation):
    '''Posts a progress update (percent + message) to the postback URI
    found in the interpreter state.

    Failures are deliberately swallowed: progress reporting must never
    abort an appliance build.
    '''

    def __repr__(self):
        return "<{}: '{}'>".format(type(self).__name__, self.message)

    def visit(self, state):
        try:
            import requests
            requests.post(state['postbackuri'], data={
                'status_pct': self.percent,
                'status_msg': self.message
            }, timeout=2.0)
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # still propagate; everything else stays best-effort.
            pass
# Maps operation names (as they appear in the IR / parse tree) to the node
# classes that implement them.
optree = {
    'noop': NoOpOperation,
    'runcmd': RunCmdOperation,
    'setenv': SetEnvOperation,
    'render_template': RenderTemplateOperation,
    'mkdir_p': MkdirParentsOperation,
    'postback': PostBackOperation,
}
def compile_parsetree(parsetree):
    '''Compiles a parse tree into bytecode.

    Each node dict's 'operation' key selects a node class from optree; the
    remaining keys become attributes on the instantiated node. Unknown
    operations fall back to UnimplementedOperation (which raises when
    visited). Note the input dicts are mutated (operation key popped),
    matching the original behaviour.
    '''
    lst = list()
    for node in parsetree:
        op = node.pop('operation', 'noop')
        # dict.get replaces the Python-2-only has_key() lambda.
        cstr = optree.get(op, UnimplementedOperation)
        # Optimization: drop noop operations, since they exist only to keep
        # the IR layer happy. Blame JSON for requiring this hack.
        if cstr is NoOpOperation:
            continue
        lst.append(cstr(op, **node))
    return lst
def interpret_parsetree(set, state):
    '''Executes bytecode and returns the results of all computations.'''
    # NOTE: the parameter name 'set' shadows the builtin; kept unchanged for
    # backward compatibility with existing callers.
    results = []
    for node in set:
        results.append(node.visit(state))
    return results
|
ApplianceKit-NG
|
/ApplianceKit-NG-0.6.2.tar.gz/ApplianceKit-NG-0.6.2/appliancekit/parsetree.py
|
parsetree.py
|
from appliancekit.compiler import compile_ir_string
import xml.parsers.expat
import urllib
import json
def get_appliancexml_from_xml_file(filepath):
    """Parse an XML file into an appliance config.

    Walks the document with expat, flattening it into a dict keyed by the
    dotted tag path (e.g. 'appliance.distribution'). The package list and
    the pre/post-install scriptlets accumulate into lists; every other tag
    maps to its (last) text value.

    NOTE(review): expat may deliver one text node across several
    CharacterDataHandler calls, so long values could arrive fragmented -
    confirm real inputs are small enough that this does not matter.
    """
    currentTag = []
    xmlConfig = {}
    def startTag(name, attrs):
        # Push the element onto the open-tag stack.
        currentTag.append({'name': name, 'attrs': attrs})
    def endTag(name):
        currentTag.pop()
    def getTagPath():
        # Dotted path of the currently open tags, e.g. 'appliance.packagelist'.
        st = ""
        for tag in currentTag:
            if st != "":
                st += ".%s" % tag['name']
            else:
                st = tag['name']
        return st
    def characterData(data):
        # List-valued sections append; first append creates the list.
        if getTagPath() == "appliance.packagelist":
            pass
        elif getTagPath() == "appliance.packagelist.package":
            try:
                xmlConfig[ "appliance.packagelist" ].append(data)
            except:
                xmlConfig[ "appliance.packagelist" ] = [ data ]
        elif getTagPath() == "appliance.scriptlet.preinstall":
            try:
                xmlConfig[ "appliance.scriptlet.preinstall" ].append(data)
            except:
                xmlConfig[ "appliance.scriptlet.preinstall" ] = [ data ]
        elif getTagPath() == "appliance.scriptlet.postinstall":
            try:
                xmlConfig[ "appliance.scriptlet.postinstall" ].append(data)
            except:
                xmlConfig[ "appliance.scriptlet.postinstall" ] = [ data ]
        else:
            xmlConfig[ getTagPath() ] = data
    # Python 2 API: URLopener handles both local paths and URLs.
    f = urllib.URLopener().open(filepath)
    data = f.read()
    f.close()
    p = xml.parsers.expat.ParserCreate()
    p.StartElementHandler = startTag
    p.EndElementHandler = endTag
    p.CharacterDataHandler = characterData
    p.Parse(data)
    return xmlConfig
def get_appliance_config_from_xml_file(filepath):
    """Build an appliance config dict from an AXML file.

    Optional sections (package list, pre/post-install scriptlets) default to
    empty lists; the distribution element is mandatory and raises KeyError
    when absent.

    The original used bare ``except:`` blocks that swallowed every error;
    ``dict.get`` with a default expresses the same fallback without hiding
    unrelated failures.
    """
    xmlConfig = get_appliancexml_from_xml_file(filepath)
    config = {}
    config['packageList'] = xmlConfig.get('appliance.packagelist', [])
    config['scriptlet.preinstall'] = xmlConfig.get('appliance.scriptlet.preinstall', [])
    config['scriptlet.postinstall'] = xmlConfig.get('appliance.scriptlet.postinstall', [])
    config['distribution'] = xmlConfig['appliance.distribution']
    return config
def translate_axml_file(filepath):
    """Translate an AXML appliance description into high-level IR source.

    Produces a Jinja2 snippet that extends the matching distribution
    specfile and, when packages are listed, sets the ``packages`` variable.

    The Python-2-only ``has_key()`` calls were replaced with ``dict.get``,
    which behaves identically and also works on Python 3.
    """
    axmlconfig = get_appliance_config_from_xml_file(filepath)
    # Map AXML release code names onto specfile names.
    distMap = {
        'lenny': 'debian-5',
        'squeeze': 'debian-6',
        'wheezy': 'debian-7',
        'sid': 'debian-base',
        'lucid': 'ubuntu-10.04',
    }
    def get_distname(name):
        # Unknown names pass through unchanged.
        return distMap.get(name, name)
    trans = '{% extends "' + get_distname(axmlconfig['distribution']) + '.spec" %}\r\n'
    # Only emit the packages line when the list exists and is non-empty.
    if axmlconfig.get('packageList'):
        trans += "{% " + "set packages={}".format(json.dumps(axmlconfig['packageList'])) + " %}\r\n"
    return trans
def compile_axml_file(filepath, **kwargs):
    """Compile an AXML file into a parse tree.

    Translates the AXML to IR source, weaves it through the IR compiler,
    and returns the resulting JSON parse tree as Python objects.
    """
    ir_source = translate_axml_file(filepath)
    woven = compile_ir_string(ir_source, **kwargs)
    return json.loads(woven)
|
ApplianceKit-NG
|
/ApplianceKit-NG-0.6.2.tar.gz/ApplianceKit-NG-0.6.2/appliancekit/axml.py
|
axml.py
|
import sys
import logging
sys.path.append('../')
from PyQt5.QtWidgets import QDialog, QLabel, QComboBox, QPushButton, QApplication
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QStandardItemModel, QStandardItem
logger = logging.getLogger('client_dist')
# Диалог выбора контакта для добавления
class AddContactDialog(QDialog):
    """Dialog for picking a contact to add.

    Offers a combo box filled with every known user that is not already in
    the contact list (and is not this client itself), plus refresh / add /
    cancel buttons.
    """

    def __init__(self, transport, database):
        # transport: ClientTransport used to talk to the server.
        # database: ClientDatabase with the cached users/contacts tables.
        super().__init__()
        self.transport = transport
        self.database = database
        self.setFixedSize(350, 120)
        self.setWindowTitle('Выберите контакт для добавления:')
        # Destroy the dialog object if the window is closed prematurely.
        self.setAttribute(Qt.WA_DeleteOnClose)
        # Make this window modal (i.e. shown on top of the others).
        self.setModal(True)
        self.selector_label = QLabel('Выберите контакт для добавления:', self)
        self.selector_label.setFixedSize(200, 20)
        self.selector_label.move(10, 0)
        self.selector = QComboBox(self)
        self.selector.setFixedSize(200, 20)
        self.selector.move(10, 30)
        self.btn_refresh = QPushButton('Обновить список', self)
        self.btn_refresh.setFixedSize(100, 30)
        self.btn_refresh.move(60, 60)
        self.btn_ok = QPushButton('Добавить', self)
        self.btn_ok.setFixedSize(100, 30)
        self.btn_ok.move(230, 20)
        self.btn_cancel = QPushButton('Отмена', self)
        self.btn_cancel.setFixedSize(100, 30)
        self.btn_cancel.move(230, 60)
        self.btn_cancel.clicked.connect(self.close)
        # Fill the list of candidate contacts.
        self.possible_contacts_update()
        # Wire the refresh button.
        self.btn_refresh.clicked.connect(self.update_possible_contacts)

    # Fills the selector with the difference between all known users and the
    # current contacts.
    def possible_contacts_update(self):
        """Rebuild the combo box with users that can still be added."""
        self.selector.clear()
        # Sets of current contacts and of all known users.
        contacts_list = set(self.database.get_contacts())
        users_list = set(self.database.get_users())
        # Remove ourselves so the user cannot add themselves as a contact.
        # NOTE(review): set.remove raises KeyError if our own name is missing
        # from the users table - confirm the table always contains it.
        users_list.remove(self.transport.username)
        # Offer everything that is not a contact yet.
        self.selector.addItems(users_list - contacts_list)

    # Refreshes the known-users table from the server, then rebuilds the
    # candidate list.
    def update_possible_contacts(self):
        """Re-fetch the user list from the server and rebuild the selector."""
        try:
            self.transport.user_list_update()
        except OSError:
            pass
        else:
            logger.debug('Обновление списка пользователей с сервера выполнено')
            self.possible_contacts_update()
# Manual test harness: open the dialog against a local test database and
# transport.
if __name__ == '__main__':
    app = QApplication(sys.argv)
    from database import ClientDatabase
    database = ClientDatabase('test1')
    from transport import ClientTransport
    # NOTE(review): ClientTransport may require additional arguments
    # (password, keys) in this package - confirm this harness still runs.
    transport = ClientTransport(7777, '127.0.0.1', database, 'test1')
    window = AddContactDialog(transport, database)
    window.show()
    app.exec_()
|
ApplicationClientServer-client
|
/ApplicationClientServer_client-0.1.tar.gz/ApplicationClientServer_client-0.1/client/add_contact.py
|
add_contact.py
|
import socket
import sys
import time
import logging
import json
import threading
import hashlib
import hmac
import binascii
from PyQt5.QtCore import pyqtSignal, QObject
sys.path.append('../')
from utils import *
from variables1 import *
from errors import ServerError
# Логер и объект блокировки для работы с сокетом.
logger = logging.getLogger('client_dist')
socket_lock = threading.Lock()
# Класс - Транспорт, отвечает за взаимодействие с сервером
class ClientTransport(threading.Thread, QObject):
# Сигналы новое сообщение и потеря соединения
new_message = pyqtSignal(dict)
message_205 = pyqtSignal()
connection_lost = pyqtSignal()
    def __init__(self, port, ip_address, database, username, passwd, keys):
        """Create the transport, connect to the server and sync the tables.

        :param port: server TCP port.
        :param ip_address: server IP address.
        :param database: client-side database wrapper.
        :param username: account name of this client.
        :param passwd: plain-text password (used only to derive the auth hash).
        :param keys: RSA key pair used during authentication.
        :raises ServerError: if the connection is lost during the initial sync.
        """
        # Call both parent constructors (runs as a Thread and a QObject).
        threading.Thread.__init__(self)
        QObject.__init__(self)
        # Client-side database wrapper.
        self.database = database
        # Account name of this client.
        self.username = username
        # Password, kept only for the authentication handshake.
        self.password = passwd
        # Socket to the server (created in connection_init).
        self.transport = None
        # RSA key pair for encryption/authentication.
        self.keys = keys
        # Establish the connection:
        self.connection_init(port, ip_address)
        # Refresh the known-users and contacts tables.
        try:
            self.user_list_update()
            self.contacts_list_update()
        except OSError as err:
            # A non-empty errno means the connection is gone (not a timeout).
            if err.errno:
                logger.critical(f'Потеряно соединение с сервером.')
                raise ServerError('Потеряно соединение с сервером!')
            logger.error('Timeout соединения при обновлении списков пользователей.')
        except json.JSONDecodeError:
            logger.critical(f'Потеряно соединение с сервером.')
            raise ServerError('Потеряно соединение с сервером!')
        # Flag: the transport keeps running while this stays True.
        self.running = True
# Функция инициализации соединения с сервером
    def connection_init(self, port, ip):
        """Open the socket to the server and run the authorization handshake.

        :param port: server TCP port.
        :param ip: server IP address.
        :raises ServerError: if connecting or authorization fails.
        """
        # Create the socket and announce ourselves to the server.
        self.transport = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # A timeout is required so the socket gets released between reads.
        self.transport.settimeout(5)
        # Try to connect up to 5 times; set the flag on success.
        connected = False
        for i in range(5):
            logger.info(f'Попытка подключения №{i + 1}')
            try:
                self.transport.connect((ip, port))
            except (OSError, ConnectionRefusedError):
                pass
            else:
                connected = True
                break
            time.sleep(1)
        # Could not connect at all - give up.
        if not connected:
            logger.critical('Не удалось установить соединение с сервером')
            raise ServerError('Не удалось установить соединение с сервером')
        logger.debug('Установлено соединение с сервером')
        # Authorization procedure.
        # Password hash: PBKDF2-HMAC-SHA512, lower-cased username as the salt.
        passwd_bytes = self.password.encode('utf-8')
        salt = self.username.lower().encode('utf-8')
        passwd_hash = hashlib.pbkdf2_hmac('sha512', passwd_bytes, salt, 10000)
        passwd_hash_string = binascii.hexlify(passwd_hash)
        logger.debug(f'Passwd hash ready: {passwd_hash_string}')
        # Export the public key and decode it from bytes.
        pubkey = self.keys.publickey().export_key().decode('ascii')
        with socket_lock:
            presense = {
                ACTION: PRESENCE,
                TIME: time.time(),
                USER: {
                    ACCOUNT_NAME: self.username,
                    PUBLIC_KEY: pubkey
                }
            }
            logger.debug(f"Presense message = {presense}")
            try:
                send_message(self.transport, presense)
                ans = get_message(self.transport)
                logger.debug(f'Server response = {ans}.')
                # If the server returned an error - raise.
                if RESPONSE in ans:
                    if ans[RESPONSE] == 400:
                        raise ServerError(ans[ERROR])
                    elif ans[RESPONSE] == 511:
                        # All good - answer the server's 511 challenge with an
                        # HMAC digest of the challenge data.
                        ans_data = ans[DATA]
                        # NOTE(review): 'hash' shadows the builtin; left
                        # unchanged in this documentation-only pass.
                        hash = hmac.new(passwd_hash_string, ans_data.encode('utf-8'), 'MD5')
                        digest = hash.digest()
                        my_ans = RESPONSE_511
                        my_ans[DATA] = binascii.b2a_base64(digest).decode('ascii')
                        send_message(self.transport, my_ans)
                        self.process_server_ans(get_message(self.transport))
            except (OSError, json.JSONDecodeError) as err:
                logger.debug(f'Connection error.', exc_info=err)
                raise ServerError('Сбой соединения в процессе авторизации.')
# # Если всё хорошо, сообщение об установке соединения.
# logger.info('Соединение с сервером успешно установлено.')
# Функция, генерирующая приветственное сообщение для сервера
# def create_presence(self):
# out = {
# ACTION: PRESENCE,
# TIME: time.time(),
# USER: {
# ACCOUNT_NAME: self.username
# }
# }
# logger.debug(f'Сформировано {PRESENCE} сообщение для пользователя {self.username}')
# return out
# Функция, обрабатывающая сообщения от сервера. Ничего не возвращает.
# Генерирует исключение при ошибке.
def process_server_ans(self, message):
    """Handle one message arriving from the server.

    Service responses (carrying RESPONSE) are acted on directly; a chat
    message addressed to this client is forwarded through the
    ``new_message`` signal.  Raises ServerError on a 400 response.
    """
    logger.debug(f'Разбор сообщения от сервера: {message}')
    if RESPONSE in message:
        code = message[RESPONSE]
        if code == 200:
            return
        if code == 400:
            raise ServerError(f'{message[ERROR]}')
        if code == 205:
            # Server asks us to refresh the local user/contact caches.
            self.user_list_update()
            self.contacts_list_update()
            self.message_205.emit()
        else:
            logger.error(
                f'Принят неизвестный код подтверждения {code}')
        return
    # A user message: must carry every field and be addressed to us.
    if (message.get(ACTION) == MESSAGE
            and SENDER in message
            and DESTINATION in message
            and MESSAGE_TEXT in message
            and message[DESTINATION] == self.username):
        logger.debug(
            f'Получено сообщение от пользователя {message[SENDER]}:{message[MESSAGE_TEXT]}')
        self.new_message.emit(message)
# Функция, обновляющая контакт - лист с сервера
def contacts_list_update(self):
    """Fetch the contact list from the server and rebuild the local table."""
    self.database.contacts_clear()
    logger.debug(f'Запрос контакт листа для пользователся {self.name}')
    request = {
        ACTION: GET_CONTACTS,
        TIME: time.time(),
        USER: self.username,
    }
    logger.debug(f'Сформирован запрос {request}')
    # Hold the socket for the full request/response round trip.
    with socket_lock:
        send_message(self.transport, request)
        answer = get_message(self.transport)
    logger.debug(f'Получен ответ {answer}')
    if RESPONSE in answer and answer[RESPONSE] == 202:
        for contact in answer[LIST_INFO]:
            self.database.add_contact(contact)
    else:
        logger.error('Не удалось обновить список контактов.')
def user_list_update(self):
    """Fetch the list of known users from the server and store it locally."""
    logger.debug(f'Запрос списка известных пользователей {self.username}')
    request = {
        ACTION: USERS_REQUEST,
        TIME: time.time(),
        ACCOUNT_NAME: self.username,
    }
    # Hold the socket for the full request/response round trip.
    with socket_lock:
        send_message(self.transport, request)
        answer = get_message(self.transport)
    if RESPONSE in answer and answer[RESPONSE] == 202:
        self.database.add_users(answer[LIST_INFO])
    else:
        logger.error('Не удалось обновить список известных пользователей.')
def key_request(self, user):
    """Request *user*'s public key from the server.

    Returns the key string on a 511 response; otherwise logs an error
    and returns None.
    """
    logger.debug(f'Запрос публичного ключа для {user}')
    req = {
        ACTION: PUBLIC_KEY_REQUEST,
        TIME: time.time(),
        ACCOUNT_NAME: user
    }
    with socket_lock:
        send_message(self.transport, req)
        ans = get_message(self.transport)
    if RESPONSE in ans and ans[RESPONSE] == 511:
        return ans[DATA]
    # Fixed: the log message was missing a space before the user name
    # ('собеседника{user}').
    logger.error(f'Не удалось получить ключ собеседника {user}.')
# Функция сообщающая на сервер о добавлении нового контакта
def add_contact(self, contact):
    """Tell the server to add *contact* to this user's contact list."""
    logger.debug(f'Создание контакта {contact}')
    request = {
        ACTION: ADD_CONTACT,
        TIME: time.time(),
        USER: self.username,
        ACCOUNT_NAME: contact,
    }
    # Send the request and let the common handler check the reply.
    with socket_lock:
        send_message(self.transport, request)
        self.process_server_ans(get_message(self.transport))
# Функция удаления клиента на сервере
def remove_contact(self, contact):
    """Tell the server to remove *contact* from this user's contact list."""
    logger.debug(f'Удаление контакта {contact}')
    request = {
        ACTION: REMOVE_CONTACT,
        TIME: time.time(),
        USER: self.username,
        ACCOUNT_NAME: contact,
    }
    # Send the request and let the common handler check the reply.
    with socket_lock:
        send_message(self.transport, request)
        self.process_server_ans(get_message(self.transport))
# Функция закрытия соединения, отправляет сообщение о выходе.
def transport_shutdown(self):
    """Notify the server that this client is exiting and stop the loop."""
    self.running = False
    exit_message = {
        ACTION: EXIT,
        TIME: time.time(),
        ACCOUNT_NAME: self.username,
    }
    with socket_lock:
        try:
            send_message(self.transport, exit_message)
        except OSError:
            # Best effort only: the socket may already be dead on shutdown.
            pass
    logger.debug('Транспорт завершает работу.')
    # Give the receiver thread a moment to notice self.running is False.
    time.sleep(0.5)
# Функция отправки сообщения на сервер
def send_message(self, to, message):
    """Send chat *message* to user *to* and process the server's reply."""
    message_dict = {
        ACTION: MESSAGE,
        SENDER: self.username,
        DESTINATION: to,
        TIME: time.time(),
        MESSAGE_TEXT: message,
    }
    logger.debug(f'Сформирован словарь сообщения: {message_dict}')
    # The socket must be free before the message can go out; hold the
    # lock for the full request/response round trip.
    with socket_lock:
        send_message(self.transport, message_dict)
        self.process_server_ans(get_message(self.transport))
        logger.info(f'Отправлено сообщение для пользователя {to}')
def run(self):
    """Main loop of the transport thread: poll the socket for messages."""
    logger.debug('Запущен процесс - приёмник собщений с сервера.')
    while self.running:
        # Sleep a second before trying to grab the socket; without this
        # delay a sender could wait a long time for the lock.
        time.sleep(1)
        message = None
        with socket_lock:
            try:
                self.transport.settimeout(0.5)
                message = get_message(self.transport)
            except OSError as err:
                # errno set means a real socket failure; a plain read
                # timeout has errno None and is simply ignored.
                if err.errno:
                    logger.critical(f'Потеряно соединение с сервером.')
                    self.running = False
                    self.connection_lost.emit()
            # Connection-level problems.
            except (ConnectionError, ConnectionAbortedError, ConnectionResetError, json.JSONDecodeError, TypeError):
                logger.debug(f'Потеряно соединение с сервером.')
                self.running = False
                self.connection_lost.emit()
            finally:
                # Restore the long blocking timeout for normal operation.
                self.transport.settimeout(5)
        # Dispatch a received message outside the lock.
        if message:
            logger.debug(f'Принято сообщение с сервера: {message}')
            self.process_server_ans(message)
|
ApplicationClientServer-client
|
/ApplicationClientServer_client-0.1.tar.gz/ApplicationClientServer_client-0.1/client/transport.py
|
transport.py
|
import sys
import logging
sys.path.append('../')
from PyQt5.QtWidgets import QDialog, QLabel, QComboBox, QPushButton, QApplication
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QStandardItemModel, QStandardItem
# logger = logging.getLogger('client_dist')
logger = logging.getLogger('client')
# Диалог выбора контакта для удаления
class DelContactDialog(QDialog):
    """Modal dialog asking the user to pick a contact to delete."""

    def __init__(self, database):
        super().__init__()
        self.database = database

        # Window chrome: fixed-size modal dialog, destroyed on close.
        self.setFixedSize(350, 120)
        self.setWindowTitle('Выберите контакт для удаления:')
        self.setAttribute(Qt.WA_DeleteOnClose)
        self.setModal(True)

        self.selector_label = QLabel('Выберите контакт для удаления:', self)
        self.selector_label.setFixedSize(200, 20)
        self.selector_label.move(10, 0)

        # Drop-down pre-filled with the contacts from the local database.
        self.selector = QComboBox(self)
        self.selector.setFixedSize(200, 20)
        self.selector.move(10, 30)
        self.selector.addItems(sorted(self.database.get_contacts()))

        self.btn_ok = QPushButton('Удалить', self)
        self.btn_ok.setFixedSize(100, 30)
        self.btn_ok.move(230, 20)

        self.btn_cancel = QPushButton('Отмена', self)
        self.btn_cancel.setFixedSize(100, 30)
        self.btn_cancel.move(230, 60)
        self.btn_cancel.clicked.connect(self.close)
if __name__ == '__main__':
    app = QApplication(sys.argv)
    from database import ClientDatabase
    database = ClientDatabase('test1')
    window = DelContactDialog(database)
    # Contacts are wiped and re-fetched from the server on connect, so
    # add a couple by hand to have something to show in the dialog.
    for name in ('test1', 'test2'):
        database.add_contact(name)
    print(database.get_contacts())
    window.selector.addItems(sorted(database.get_contacts()))
    window.show()
    app.exec_()
|
ApplicationClientServer-client
|
/ApplicationClientServer_client-0.1.tar.gz/ApplicationClientServer_client-0.1/client/del_contact.py
|
del_contact.py
|
from PyQt5.QtWidgets import QMainWindow, qApp, QMessageBox, QApplication, QListView
from PyQt5.QtGui import QStandardItemModel, QStandardItem, QBrush, QColor
from PyQt5.QtCore import pyqtSlot, QEvent, Qt
from Crypto.Cipher import PKCS1_OAEP
from Crypto.PublicKey import RSA
import json
import logging
import base64
import sys
sys.path.append('../')
from client.main_window_conv import Ui_MainClientWindow
from client.add_contact import AddContactDialog
from client.del_contact import DelContactDialog
from common.errors import ServerError
from common.variables import *
logger = logging.getLogger('client_dist')
class ClientMainWindow(QMainWindow):
    """Main client window.

    Holds the core logic of the client module.  The window layout is
    built in Qt Designer and loaded from the converted module
    main_window_conv.py.
    """

    def __init__(self, database, transport, keys):
        super().__init__()
        # Core collaborators.
        self.database = database
        self.transport = transport
        # Message decryptor preloaded with this client's private key.
        self.decrypter = PKCS1_OAEP.new(keys)
        # Load the window layout produced by Qt Designer.
        self.ui = Ui_MainClientWindow()
        self.ui.setupUi(self)
        # "Exit" menu action.
        self.ui.menu_exit.triggered.connect(qApp.exit)
        # "Send message" button.
        self.ui.btn_send.clicked.connect(self.send_message)
        # "Add contact" button and menu action.
        self.ui.btn_add_contact.clicked.connect(self.add_contact_window)
        self.ui.menu_add_contact.triggered.connect(self.add_contact_window)
        # "Remove contact" button and menu action.
        self.ui.btn_remove_contact.clicked.connect(self.delete_contact_window)
        self.ui.menu_del_contact.triggered.connect(self.delete_contact_window)
        # Additional state: models, the active chat partner and its key.
        self.contacts_model = None
        self.history_model = None
        self.messages = QMessageBox()
        self.current_chat = None
        self.current_chat_key = None
        self.encryptor = None
        self.ui.list_messages.setHorizontalScrollBarPolicy(
            Qt.ScrollBarAlwaysOff)
        self.ui.list_messages.setWordWrap(True)
        # Double-click on the contact list selects the active chat.
        self.ui.list_contacts.doubleClicked.connect(self.select_active_user)
        self.clients_list_update()
        self.set_disabled_input()
        self.show()
def set_disabled_input(self):
    """Disable the message input widgets until a recipient is chosen."""
    # Recipient hint label.
    self.ui.label_new_message.setText(
        'Для выбора получателя дважды кликните на нем в окне контактов.')
    self.ui.text_message.clear()
    if self.history_model:
        self.history_model.clear()
    # Input field and buttons stay inactive until a recipient is picked.
    for widget in (self.ui.btn_clear, self.ui.btn_send,
                   self.ui.text_message):
        widget.setDisabled(True)
    # Drop all per-chat state.
    self.encryptor = None
    self.current_chat = None
    self.current_chat_key = None
def history_list_update(self):
    """Fill the message QListView with the current chat's history.

    Shows at most the 20 most recent messages; incoming and outgoing
    entries differ in alignment and background colour.
    """
    # History sorted by date (row layout: contact, direction, text, date).
    # Fixed: the local used to shadow the builtin `list`.
    history = sorted(
        self.database.get_history(self.current_chat),
        key=lambda item: item[3])
    # Create the model lazily on first use.
    if not self.history_model:
        self.history_model = QStandardItemModel()
        self.ui.list_messages.setModel(self.history_model)
    # Drop the old rows.
    self.history_model.clear()
    # Keep only the 20 most recent records (slice replaces the manual
    # start-index arithmetic).
    for item in history[-20:]:
        if item[1] == 'in':
            mess = QStandardItem(
                f'Входящее от {item[3].replace(microsecond=0)}:\n {item[2]}')
            mess.setEditable(False)
            mess.setBackground(QBrush(QColor(255, 213, 213)))
            mess.setTextAlignment(Qt.AlignLeft)
        else:
            mess = QStandardItem(
                f'Исходящее от {item[3].replace(microsecond=0)}:\n {item[2]}')
            mess.setEditable(False)
            mess.setTextAlignment(Qt.AlignRight)
            mess.setBackground(QBrush(QColor(204, 255, 204)))
        self.history_model.appendRow(mess)
    self.ui.list_messages.scrollToBottom()
def select_active_user(self):
    """Handle a double click on the contact list: open that chat."""
    # The chosen partner is the currently selected QListView item.
    self.current_chat = self.ui.list_contacts.currentIndex().data()
    # Delegate the actual chat activation.
    self.set_active_user()
def set_active_user(self):
    """Activate the chat with the selected partner.

    Fetches the partner's public key, builds the encryptor, enables the
    input widgets and loads the message history.
    """
    # Ask the server for the partner's public key and set up encryption.
    try:
        self.current_chat_key = self.transport.key_request(
            self.current_chat)
        logger.debug(f'Загружен открытый ключ для {self.current_chat}')
        if self.current_chat_key:
            self.encryptor = PKCS1_OAEP.new(
                RSA.import_key(self.current_chat_key))
    except (OSError, json.JSONDecodeError):
        self.current_chat_key = None
        self.encryptor = None
        logger.debug(f'Не удалось получить ключ для {self.current_chat}')
    # Without a key the chat cannot be started.
    if not self.current_chat_key:
        self.messages.warning(
            self, 'Ошибка', 'Для выбранного пользователя нет ключа шифрования.')
        return
    # Show who we are talking to and enable the input widgets.
    self.ui.label_new_message.setText(
        f'Введите сообщенние для {self.current_chat}:')
    for widget in (self.ui.btn_clear, self.ui.btn_send,
                   self.ui.text_message):
        widget.setDisabled(False)
    # Load the history for the selected partner.
    self.history_list_update()
def clients_list_update(self):
    """Rebuild the contact-list model from the local database."""
    self.contacts_model = QStandardItemModel()
    for contact in sorted(self.database.get_contacts()):
        row = QStandardItem(contact)
        row.setEditable(False)
        self.contacts_model.appendRow(row)
    self.ui.list_contacts.setModel(self.contacts_model)
def add_contact_window(self):
    """Open the 'add contact' dialog."""
    # Kept in a module-level global so the dialog is not garbage-collected.
    global select_dialog
    select_dialog = AddContactDialog(self.transport, self.database)
    select_dialog.btn_ok.clicked.connect(
        lambda: self.add_contact_action(select_dialog))
    select_dialog.show()
def add_contact_action(self, item):
    """Handle the dialog's 'Add' button: add the chosen contact and close."""
    new_contact = item.selector.currentText()
    self.add_contact(new_contact)
    item.close()
def add_contact(self, new_contact):
    """Add *new_contact* to the server-side and local databases.

    On success the contact-list widget is updated as well.
    """
    try:
        self.transport.add_contact(new_contact)
    except ServerError as err:
        self.messages.critical(self, 'Ошибка сервера', err.text)
    except OSError as err:
        if err.errno:
            self.messages.critical(
                self, 'Ошибка', 'Потеряно соединение с сервером!')
            self.close()
        else:
            # Fixed: the timeout warning used to be shown unconditionally,
            # even after the connection-lost branch above.
            self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
    else:
        self.database.add_contact(new_contact)
        item = QStandardItem(new_contact)
        item.setEditable(False)
        self.contacts_model.appendRow(item)
        # Fixed: log the contact name; the original rebound `new_contact`
        # to a QStandardItem and logged the widget object instead.
        logger.info(f'Успешно добавлен контакт {new_contact}')
        self.messages.information(
            self, 'Успех', 'Контакт успешно добавлен.')
def delete_contact_window(self):
    """Open the 'remove contact' dialog."""
    # Kept in a module-level global so the dialog is not garbage-collected.
    global remove_dialog
    remove_dialog = DelContactDialog(self.database)
    remove_dialog.btn_ok.clicked.connect(
        lambda: self.delete_contact(remove_dialog))
    remove_dialog.show()
def delete_contact(self, item):
    """Remove the contact chosen in the dialog from server and local DB.

    Updates the contact-list widget and, when the active chat partner
    was removed, disables the input widgets.
    """
    selected = item.selector.currentText()
    try:
        self.transport.remove_contact(selected)
    except ServerError as err:
        self.messages.critical(self, 'Ошибка сервера', err.text)
    except OSError as err:
        if err.errno:
            self.messages.critical(
                self, 'Ошибка', 'Потеряно соединение с сервером!')
            self.close()
        else:
            # Fixed: the timeout warning used to be shown unconditionally,
            # even after the connection-lost branch above.
            self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
    else:
        self.database.del_contact(selected)
        self.clients_list_update()
        logger.info(f'Успешно удалён контакт {selected}')
        self.messages.information(self, 'Успех', 'Контакт успешно удалён.')
        item.close()
        # If the active chat partner was removed, deactivate input.
        if selected == self.current_chat:
            self.current_chat = None
            self.set_disabled_input()
def send_message(self):
    """Encrypt and send the typed message to the current chat partner."""
    # Take the text from the input field; ignore empty messages.
    message_text = self.ui.text_message.toPlainText()
    self.ui.text_message.clear()
    if not message_text:
        return
    # Encrypt with the recipient's public key and wrap in base64.
    message_text_encrypted = self.encryptor.encrypt(
        message_text.encode('utf8'))
    message_text_encrypted_base64 = base64.b64encode(
        message_text_encrypted)
    try:
        # Fixed: removed a stray `pass` left after this call.
        self.transport.send_message(
            self.current_chat,
            message_text_encrypted_base64.decode('ascii'))
    except ServerError as err:
        self.messages.critical(self, 'Ошибка', err.text)
    except OSError as err:
        if err.errno:
            self.messages.critical(
                self, 'Ошибка', 'Потеряно соединение с сервером!')
            self.close()
        else:
            # Fixed: the timeout warning used to be shown unconditionally,
            # even after the connection-lost branch above.
            self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
    except (ConnectionResetError, ConnectionAbortedError):
        self.messages.critical(
            self, 'Ошибка', 'Потеряно соединение с сервером!')
        self.close()
    else:
        self.database.save_message(self.current_chat, 'out', message_text)
        logger.debug(
            f'Отправлено сообщение для {self.current_chat}: {message_text}')
        self.history_list_update()
@pyqtSlot(dict)
def message(self, message):
    """Slot for incoming messages: decrypt, store and display them.

    If the message is from someone other than the current chat partner,
    ask the user whether to switch (and, if needed, add the sender to
    the contact list first).
    """
    # Decode the base64 payload and decrypt with our private key.
    encrypted_message = base64.b64decode(message[MESSAGE_TEXT])
    try:
        decrypted_message = self.decrypter.decrypt(encrypted_message)
    except (ValueError, TypeError):
        self.messages.warning(
            self, 'Ошибка', 'Не удалось декодировать сообщение.')
        return
    sender = message[SENDER]
    # Fixed: the message is stored under its sender.  It used to be
    # stored under self.current_chat, which filed it in the wrong chat
    # (or under None) whenever the sender was not the active partner;
    # the duplicated save in the add-contact branch is gone as well.
    self.database.save_message(
        sender, 'in', decrypted_message.decode('utf8'))
    if sender == self.current_chat:
        self.history_list_update()
    elif self.database.check_contact(sender):
        # Known contact: offer to switch to the chat with them.
        if self.messages.question(
                self,
                'Новое сообщение',
                f'Получено новое сообщение от {sender}, открыть чат с ним?',
                QMessageBox.Yes,
                QMessageBox.No) == QMessageBox.Yes:
            self.current_chat = sender
            self.set_active_user()
    else:
        # Unknown sender: offer to add them to contacts and open the
        # chat.  (Leftover debug print removed.)
        if self.messages.question(
                self,
                'Новое сообщение',
                f'Получено новое сообщение от {sender}.\n Данного пользователя нет в вашем контакт-листе.\n Добавить в контакты и открыть чат с ним?',
                QMessageBox.Yes,
                QMessageBox.No) == QMessageBox.Yes:
            self.add_contact(sender)
            self.current_chat = sender
            self.set_active_user()
@pyqtSlot()
def connection_lost(self):
    """Slot for a lost server connection: warn the user and close the app."""
    self.messages.warning(
        self,
        'Сбой соединения',
        'Потеряно соединение с сервером. ')
    self.close()
@pyqtSlot()
def sig_205(self):
    """Slot refreshing the local databases on the server's 205 command."""
    partner_removed = (
        self.current_chat
        and not self.database.check_user(self.current_chat))
    if partner_removed:
        # The active partner no longer exists on the server.
        self.messages.warning(
            self,
            'Сочувствую',
            'К сожалению собеседник был удалён с сервера.')
        self.set_disabled_input()
        self.current_chat = None
    self.clients_list_update()
def make_connection(self, trans_obj):
    """Wire the transport object's signals to this window's slots."""
    trans_obj.new_message.connect(self.message)
    trans_obj.connection_lost.connect(self.connection_lost)
    trans_obj.message_205.connect(self.sig_205)
|
ApplicationClientServer-client
|
/ApplicationClientServer_client-0.1.tar.gz/ApplicationClientServer_client-0.1/client/main_window.py
|
main_window.py
|
import datetime
import sys
sys.path.append('../')
from common.variables import *
from sqlalchemy import create_engine, Table, Column, Integer, String, Text, MetaData, DateTime
from sqlalchemy.orm import mapper, sessionmaker
import os
class ClientDatabase:
    """Wrapper class for the client's database.

    Uses an SQLite database through classical SQLAlchemy ORM mapping.
    """

    class KnownUsers:
        """Mapped class for the table of all known users."""

        def __init__(self, user):
            self.id = None
            self.username = user

    class MessageStat:
        """Mapped class for the table of transferred-message statistics."""

        def __init__(self, contact, direction, message):
            self.id = None
            self.contact = contact
            self.direction = direction
            self.message = message
            self.date = datetime.datetime.now()

    class Contacts:
        """Mapped class for the contacts table."""

        def __init__(self, contact):
            self.id = None
            self.name = contact

    def __init__(self, name):
        """Create the engine, tables, mappings and a session for *name*.

        Several clients may run at once, so each gets its own database
        file.  The client is multi-threaded, so SQLite's same-thread
        check is disabled (otherwise sqlite3.ProgrammingError is raised).
        """
        path = os.path.dirname(os.path.realpath(__file__))
        filename = f'client_{name}.db3'
        self.database_engine = create_engine(
            f'sqlite:///{os.path.join(path, filename)}',
            echo=False,
            pool_recycle=7200,
            connect_args={
                'check_same_thread': False})
        self.metadata = MetaData()
        # Table of known users.
        users = Table('known_users', self.metadata,
                      Column('id', Integer, primary_key=True),
                      Column('username', String)
                      )
        # Message-history table.
        history = Table('message_history', self.metadata,
                        Column('id', Integer, primary_key=True),
                        Column('contact', String),
                        Column('direction', String),
                        Column('message', Text),
                        Column('date', DateTime)
                        )
        # Contacts table.
        contacts = Table('contacts', self.metadata,
                         Column('id', Integer, primary_key=True),
                         Column('name', String, unique=True)
                         )
        self.metadata.create_all(self.database_engine)
        # Classical mappings tie the plain classes above to the tables.
        mapper(self.KnownUsers, users)
        mapper(self.MessageStat, history)
        mapper(self.Contacts, contacts)
        Session = sessionmaker(bind=self.database_engine)
        self.session = Session()
        # Contacts are reloaded from the server on startup, so clear them.
        self.session.query(self.Contacts).delete()
        self.session.commit()
def add_contact(self, contact):
    """Add *contact* to the contacts table unless it is already there."""
    already_known = self.session.query(
        self.Contacts).filter_by(name=contact).count()
    if not already_known:
        self.session.add(self.Contacts(contact))
        self.session.commit()
def contacts_clear(self):
    """Remove every row from the contacts table."""
    self.session.query(self.Contacts).delete()
    self.session.commit()
def del_contact(self, contact):
    """Delete the given *contact* from the contacts table."""
    self.session.query(self.Contacts).filter_by(name=contact).delete()
    self.session.commit()
def add_users(self, users_list):
    """Replace the known-users table with the given list of names."""
    self.session.query(self.KnownUsers).delete()
    self.session.add_all(self.KnownUsers(user) for user in users_list)
    self.session.commit()
def save_message(self, contact, direction, message):
    """Store one message ('in' or 'out' *direction*) in the history table."""
    message_row = self.MessageStat(contact, direction, message)
    self.session.add(message_row)
    self.session.commit()
def get_contacts(self):
    """Return the names of all contacts as a plain list."""
    rows = self.session.query(self.Contacts.name).all()
    return [name for (name,) in rows]
def get_users(self):
    """Return the names of all known users as a plain list."""
    rows = self.session.query(self.KnownUsers.username).all()
    return [username for (username,) in rows]
def check_user(self, user):
""" Метод, проверяющий существует ли пользователь. """
if self.session.query(
self.KnownUsers).filter_by(
username=user).count():
return True
else:
return False
def check_contact(self, contact):
""" Метод, проверяющий существует ли контакт. """
if self.session.query(self.Contacts).filter_by(name=contact).count():
return True
else:
return False
def get_history(self, contact):
""" Метод, возвращающий историю сообщений с определённым пользователем. """
query = self.session.query(
self.MessageStat).filter_by(
contact=contact)
return [(history_row.contact,
history_row.direction,
history_row.message,
history_row.date) for history_row in query.all()]
# отладка
if __name__ == '__main__':
    # Manual smoke test of the database wrapper.
    test_db = ClientDatabase('test1')
    for contact in ('test3', 'test4', 'test5'):
        test_db.add_contact(contact)
    test_db.add_contact('test4')
    test_db.add_users(['test1', 'test2', 'test3', 'test4', 'test5'])
    test_db.save_message(
        'test2', 'in',
        f'Привет! я тестовое сообщение от {datetime.datetime.now()}!')
    test_db.save_message(
        'test2', 'out',
        f'Привет! я другое тестовое сообщение от {datetime.datetime.now()}!')
    print(test_db.get_contacts())
    print(test_db.get_users())
    print(test_db.check_user('test1'))
    print(test_db.check_user('test10'))
    print(sorted(test_db.get_history('test2'), key=lambda item: item[3]))
    test_db.del_contact('test4')
    print(test_db.get_contacts())
|
ApplicationClientServer-client
|
/ApplicationClientServer_client-0.1.tar.gz/ApplicationClientServer_client-0.1/client/database.py
|
database.py
|
import socket
import logging
import sys
sys.path.append('../')
# Pick the logger from the launching module's name: scripts whose
# argv[0] does not contain 'client_dist' are assumed to be the server.
if sys.argv[0].find('client_dist') == -1:
    # Not the client, so use the server logger.
    logger = logging.getLogger('server_dist')
else:
    # Otherwise this is the client.  (Original comment wrongly said
    # "server" here.)
    logger = logging.getLogger('client_dist')
def log(func_to_log):
    """Decorator logging every call of the wrapped function.

    Emits a debug record with the function name, the positional and
    keyword arguments, and the module the function lives in.
    """
    from functools import wraps

    # Fixed: preserve __name__/__doc__/__module__ of the wrapped function
    # (the original wrapper hid them behind 'log_saver').
    @wraps(func_to_log)
    def log_saver(*args, **kwargs):
        logger.debug(
            f'Была вызвана функция {func_to_log.__name__} c параметрами {args} , {kwargs}. '
            f'Вызов из модуля {func_to_log.__module__}')
        ret = func_to_log(*args, **kwargs)
        return ret
    return log_saver
def login_required(func):
    """Decorator ensuring the client is authorised on the server.

    Checks that the socket passed to the wrapped method belongs to one
    of the MessageProcessor's authorised clients.  A dict argument that
    is a PRESENCE (authorisation) request is allowed through.  Raises
    TypeError for an unauthorised client.
    """
    def checker(*args, **kwargs):
        # Check that the first argument is a MessageProcessor instance.
        # Import here, otherwise a recursive-import error occurs.
        from server.core import MessageProcessor
        from common.variables import ACTION, PRESENCE
        if isinstance(args[0], MessageProcessor):
            found = False
            for arg in args:
                if isinstance(arg, socket.socket):
                    # Is this socket one of the named (authorised)
                    # clients of the MessageProcessor?
                    for client in args[0].names:
                        if args[0].names[client] == arg:
                            found = True
            # A PRESENCE request starts authorisation - allow it through.
            for arg in args:
                if isinstance(arg, dict):
                    if ACTION in arg and arg[ACTION] == PRESENCE:
                        found = True
            # Neither authorised nor an authorisation request.
            if not found:
                raise TypeError
        return func(*args, **kwargs)
    return checker
|
ApplicationClientServer-client
|
/ApplicationClientServer_client-0.1.tar.gz/ApplicationClientServer_client-0.1/common/decos.py
|
decos.py
|
import dis
from pprint import pprint
# Метакласс для проверки соответствия сервера:
class ServerMaker(type):
def __init__(cls, clsname, bases, clsdict):
"""
:param clsname: - экземпляр метакласса - Server
:param bases: кортеж базовых классов - ()
:param clsdict: словарь атрибутов и методов экземпляра метакласса
"""
# Список методов, которые используются в функциях класса:
methods = [] # с помощью 'LOAD_GLOBAL'
methods_2 = [] # методы, обёрнутые декораторами попадают не в 'LOAD_GLOBAL', а в 'LOAD_METHOD'
# Атрибуты, используемые в функциях классов
attrs = []
for func in clsdict:
try:
ret = dis.get_instructions(clsdict[func])
except TypeError:
pass
else:
# Если функция разбираем код, получая используемые методы и атрибуты.
for i in ret:
print(i)
if i.opname == 'LOAD_GLOBAL':
if i.argval not in methods:
# заполняем список методами, использующимися в функциях класса
methods.append(i.argval)
elif i.opname == 'LOAD_METHOD':
if i.argval not in methods_2:
methods_2.append(i.argval)
elif i.opname == 'LOAD_ATTR':
if i.argval not in attrs:
# заполняем список атрибутами, использующимися в функциях класса
attrs.append(i.argval)
print(20 * '-', 'methods', 20 * '-')
pprint(methods)
print(20 * '-', 'methods_2', 20 * '-')
pprint(methods_2)
print(20 * '-', 'attrs', 20 * '-')
pprint(attrs)
print(50 * '-')
if 'connect' in methods:
raise TypeError('Использование метода connect недопустимо в серверном классе')
if not ('SOCK_STREAM' in attrs and 'AF_INET' in attrs):
raise TypeError('Некорректная инициализация сокета.')
# Вызываем конструктор предка
super().__init__(clsname, bases, clsdict)
# Метакласс для проверки корректности клиентов:
class ClientMaker(type):
def __init__(cls, clsname, bases, clsdict):
# Список методов, которые используются в функциях класса:
methods = []
for func in clsdict:
try:
ret = dis.get_instructions(clsdict[func])
# Если не функция то ловим исключение
except TypeError:
pass
else:
#Если функция разбираем код, получая используемые методы.
for i in ret:
if i.opname == 'LOAD_GLOBAL':
if i.argval not in methods:
methods.append(i.argval)
# Если обнаружено использование недопустимого метода accept, listen, socket бросаем исключение:
for command in ('accept', 'listen', 'socket'):
if command in methods:
raise TypeError('В классе обнаружено использование запрещённого метода')
# Вызов get_message или send_message из utils считаем корректным использованием сокетов
if 'get_message' in methods or 'send_message' in methods:
pass
else:
raise TypeError('Отсутствуют вызовы функций, работающих с сокетами.')
super().__init__(clsname, bases, clsdict)
|
ApplicationClientServer-client
|
/ApplicationClientServer_client-0.1.tar.gz/ApplicationClientServer_client-0.1/common/metaclasses.py
|
metaclasses.py
|
from PyQt5.QtWidgets import QDialog, QLabel, QLineEdit, QPushButton, QFileDialog, QMessageBox
from PyQt5.QtCore import Qt
import os
class ConfigWindow(QDialog):
    """Server settings dialog window."""

    def __init__(self, config):
        super().__init__()
        # configparser-style mapping holding the current server settings.
        self.config = config
        self.initUI()
def initUI(self):
    """Create and lay out all widgets of the settings window."""
    self.setFixedSize(365, 260)
    self.setWindowTitle('Настройки сервера')
    self.setAttribute(Qt.WA_DeleteOnClose)
    self.setModal(True)
    # Label for the database file path.
    self.db_path_label = QLabel('Путь до файла базы данных: ', self)
    self.db_path_label.move(10, 10)
    self.db_path_label.setFixedSize(240, 15)
    # Read-only line showing the database path.
    self.db_path = QLineEdit(self)
    self.db_path.setFixedSize(250, 20)
    self.db_path.move(10, 30)
    self.db_path.setReadOnly(True)
    # Button to browse for the path.
    self.db_path_select = QPushButton('Обзор...', self)
    self.db_path_select.move(275, 28)
    # Label for the database file name.
    self.db_file_label = QLabel('Имя файла базы данных: ', self)
    self.db_file_label.move(10, 68)
    self.db_file_label.setFixedSize(180, 15)
    # Input field for the file name.
    self.db_file = QLineEdit(self)
    self.db_file.move(200, 66)
    self.db_file.setFixedSize(150, 20)
    # Label for the port number.
    self.port_label = QLabel('Номер порта для соединений:', self)
    self.port_label.move(10, 108)
    self.port_label.setFixedSize(180, 15)
    # Input field for the port number.
    self.port = QLineEdit(self)
    self.port.move(200, 108)
    self.port.setFixedSize(150, 20)
    # Label for the listen address.
    self.ip_label = QLabel('С какого IP принимаем соединения:', self)
    self.ip_label.move(10, 148)
    self.ip_label.setFixedSize(180, 15)
    # Reminder that an empty field means "accept from any address".
    self.ip_label_note = QLabel(
        ' оставьте это поле пустым, чтобы\n принимать соединения с любых адресов.',
        self)
    self.ip_label_note.move(10, 168)
    self.ip_label_note.setFixedSize(500, 30)
    # Input field for the IP address.
    self.ip = QLineEdit(self)
    self.ip.move(200, 148)
    self.ip.setFixedSize(150, 20)
    # "Save" button.
    self.save_btn = QPushButton('Сохранить', self)
    self.save_btn.move(190, 220)
    # "Close" button.
    self.close_button = QPushButton('Закрыть', self)
    self.close_button.move(275, 220)
    self.close_button.clicked.connect(self.close)
    self.db_path_select.clicked.connect(self.open_file_dialog)
    self.show()
    # Pre-fill the fields from the current configuration.
    self.db_path.insert(self.config['SETTINGS']['Database_path'])
    self.db_file.insert(self.config['SETTINGS']['Database_file'])
    self.port.insert(self.config['SETTINGS']['Default_port'])
    self.ip.insert(self.config['SETTINGS']['Listen_Address'])
    self.save_btn.clicked.connect(self.save_server_config)
def open_file_dialog(self):
'''Метод обработчик открытия окна выбора папки.'''
global dialog
dialog = QFileDialog(self)
path = dialog.getExistingDirectory()
path = path.replace('/', '\\')
self.db_path.clear()
self.db_path.insert(path)
def save_server_config(self):
'''
Метод сохранения настроек.
Проверяет правильность введённых данных и
если всё правильно сохраняет ini файл.
'''
global config_window
message = QMessageBox()
self.config['SETTINGS']['Database_path'] = self.db_path.text()
self.config['SETTINGS']['Database_file'] = self.db_file.text()
try:
port = int(self.port.text())
except ValueError:
message.warning(self, 'Ошибка', 'Порт должен быть числом')
else:
self.config['SETTINGS']['Listen_Address'] = self.ip.text()
if 1023 < port < 65536:
self.config['SETTINGS']['Default_port'] = str(port)
dir_path = os.path.dirname(os.path.realpath(__file__))
dir_path = os.path.join(dir_path, '..')
with open(f"{dir_path}/{'server_dist+++.ini'}", 'w') as conf:
self.config.write(conf)
message.information(
self, 'OK', 'Настройки успешно сохранены!')
else:
message.warning(
self, 'Ошибка', 'Порт должен быть от 1024 до 65536')
|
ApplicationClientServer-server
|
/ApplicationClientServer_server-0.1-py3-none-any.whl/server/config_window.py
|
config_window.py
|
from PyQt5.QtWidgets import QDialog, QLabel, QComboBox, QPushButton, QApplication
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QStandardItemModel, QStandardItem
class DelUserDialog(QDialog):
    """Dialog for choosing a user to delete from the server."""

    def __init__(self, database, server):
        super().__init__()
        self.database = database
        self.server = server

        # Window configuration.
        self.setFixedSize(350, 120)
        self.setWindowTitle('Удаление пользователя')
        self.setAttribute(Qt.WA_DeleteOnClose)
        self.setModal(True)

        # Prompt label.
        self.selector_label = QLabel(
            'Выберите пользователя для удаления:', self)
        self.selector_label.move(10, 0)
        self.selector_label.setFixedSize(200, 20)

        # Combo box with all known users.
        self.selector = QComboBox(self)
        self.selector.move(10, 30)
        self.selector.setFixedSize(200, 20)

        # Confirm button.
        self.btn_ok = QPushButton('Удалить', self)
        self.btn_ok.move(230, 20)
        self.btn_ok.setFixedSize(100, 30)
        self.btn_ok.clicked.connect(self.remove_user)

        # Cancel button.
        self.btn_cancel = QPushButton('Отмена', self)
        self.btn_cancel.move(230, 60)
        self.btn_cancel.setFixedSize(100, 30)
        self.btn_cancel.clicked.connect(self.close)

        self.all_users_fill()

    def all_users_fill(self):
        """Populate the combo box with every known user name."""
        names = [entry[0] for entry in self.database.users_list()]
        self.selector.addItems(names)

    def remove_user(self):
        """Delete the selected user and drop their live connection, if any."""
        username = self.selector.currentText()
        self.database.remove_user(username)
        if username in self.server.names:
            sock = self.server.names[username]
            del self.server.names[username]
            self.server.remove_client(sock)
        # Tell connected clients to refresh their user lists.
        self.server.service_update_lists()
        self.close()
if __name__ == '__main__':
    # Manual smoke test: build a storage and a server thread, then show the dialog.
    app = QApplication([])
    from database import ServerStorage
    database = ServerStorage('../server_database.db3')
    import os
    import sys
    # Make the parent directory importable so `core` can be found.
    path1 = os.path.join(os.getcwd(), '..')
    sys.path.insert(0, path1)
    from core import MessageProcessor
    server = MessageProcessor('127.0.0.1', 7777, database)
    dial = DelUserDialog(database, server)
    dial.show()
    app.exec_()
|
ApplicationClientServer-server
|
/ApplicationClientServer_server-0.1-py3-none-any.whl/server/remove_user.py
|
remove_user.py
|
import threading
import logging
import select
import socket
import json
import hmac
import binascii
import os
import sys
sys.path.append('../')
from common.metaclasses import ServerMaker
from common.descripts import Port
from common.variables import *
from common.utils import send_message, get_message
from common.decos import login_required
# Загрузка логера
logger = logging.getLogger('server_dist')
class MessageProcessor(threading.Thread):
    """
    Main server class. Accepts connections and dictionary packets
    from clients and dispatches the incoming messages.
    Runs as a separate thread.
    """
    # Descriptor validating the port number (see common.descripts.Port).
    port = Port()

    def __init__(self, listen_address, listen_port, database):
        # Connection parameters.
        self.addr = listen_address
        self.port = listen_port
        # Server database.
        self.database = database
        # Listening socket, created in init_socket().
        self.sock = None
        # Sockets of currently connected clients.
        self.clients = []
        # Socket lists refreshed by select() on every loop pass.
        self.listen_sockets = None
        self.error_sockets = None
        # Flag that keeps the main loop running.
        self.running = True
        # Mapping: user name -> client socket.
        self.names = dict()
        # Parent (Thread) constructor.
        super().__init__()

    def run(self):
        '''Main loop of the server thread.'''
        self.init_socket()
        while self.running:
            # Wait for a connection; on timeout catch the exception and go on.
            try:
                client, client_address = self.sock.accept()
            except OSError:
                pass
            else:
                logger.info(f'Установлено соедение с ПК {client_address}')
                client.settimeout(5)
                self.clients.append(client)
            recv_data_lst = []
            send_data_lst = []
            err_lst = []
            # Poll for clients with pending data.
            try:
                if self.clients:
                    recv_data_lst, self.listen_sockets, self.error_sockets = select.select(
                        self.clients, self.clients, [], 0)
            except OSError as err:
                logger.error(f'Ошибка работы с сокетами: {err.errno}')
            # Read the pending messages; drop the client on failure.
            if recv_data_lst:
                for client_with_message in recv_data_lst:
                    try:
                        self.process_client_message(
                            get_message(client_with_message), client_with_message)
                    except (OSError, json.JSONDecodeError, TypeError) as err:
                        logger.debug(
                            'Getting data from client exception.', exc_info=err)
                        self.remove_client(client_with_message)

    def remove_client(self, client):
        '''
        Drop a client whose connection is gone: log them out in the
        database and remove the socket from all bookkeeping structures.

        :param client: the client's socket object.
        '''
        logger.info(f'Клиент {client.getpeername()} отключился от сервера.')
        for name in self.names:
            if self.names[name] == client:
                self.database.user_logout(name)
                del self.names[name]
                break
        # Guard against double removal (the socket may already be gone).
        if client in self.clients:
            self.clients.remove(client)
        client.close()

    def init_socket(self):
        '''Create, bind and start the listening socket.'''
        logger.info(
            f'Запущен сервер, порт для подключений: {self.port} , адрес с которого принимаются подключения: {self.addr}. Если адрес не указан, принимаются соединения с любых адресов.')
        transport = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        transport.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        transport.bind((self.addr, self.port))
        # Short timeout so accept() in run() does not block forever.
        transport.settimeout(0.5)
        self.sock = transport
        self.sock.listen(MAX_CONNECTIONS)

    def process_message(self, message):
        '''
        Deliver a message to its addressee, if they are connected and readable.
        '''
        if message[DESTINATION] in self.names and self.names[message[DESTINATION]
                                                             ] in self.listen_sockets:
            try:
                send_message(self.names[message[DESTINATION]], message)
                logger.info(
                    f'Отправлено сообщение пользователю {message[DESTINATION]} от пользователя {message[SENDER]}.')
            except OSError:
                # BUGFIX: remove_client() expects a socket, not a user name.
                self.remove_client(self.names[message[DESTINATION]])
        elif message[DESTINATION] in self.names and self.names[message[DESTINATION]] not in self.listen_sockets:
            logger.error(
                f'Связь с клиентом {message[DESTINATION]} была потеряна. Соединение закрыто, доставка невозможна.')
            self.remove_client(self.names[message[DESTINATION]])
        else:
            logger.error(
                f'Пользователь {message[DESTINATION]} не зарегистрирован на сервере, отправка сообщения невозможна.')

    @login_required
    def process_client_message(self, message, client):
        """
        Dispatch an incoming client message.

        Validates the request shape and routes it to the matching handler;
        replies with 400 for anything unrecognised.
        """
        logger.debug(f'Разбор сообщения от клиента : {message}')
        # Presence message -> run the authorisation procedure.
        if ACTION in message and message[ACTION] == PRESENCE and TIME in message and USER in message:
            self.autorize_user(message, client)
        # Text message -> forward it to the recipient.
        elif ACTION in message and message[ACTION] == MESSAGE and DESTINATION in message and TIME in message \
                and SENDER in message and MESSAGE_TEXT in message and self.names[message[SENDER]] == client:
            if message[DESTINATION] in self.names:
                self.database.process_message(
                    message[SENDER], message[DESTINATION])
                self.process_message(message)
                try:
                    send_message(client, RESPONSE_200)
                except OSError:
                    self.remove_client(client)
            else:
                # Copy the template so the shared module-level dict
                # is not mutated between requests.
                response = dict(RESPONSE_400)
                response[ERROR] = 'Пользователь не зарегистрирован на сервере.'
                try:
                    send_message(client, response)
                except OSError:
                    pass
            return
        # Client exits.
        elif ACTION in message and message[ACTION] == EXIT and ACCOUNT_NAME in message \
                and self.names[message[ACCOUNT_NAME]] == client:
            self.remove_client(client)
        # Contact-list request.
        elif ACTION in message and message[ACTION] == GET_CONTACTS and USER in message and \
                self.names[message[USER]] == client:
            response = dict(RESPONSE_202)
            response[LIST_INFO] = self.database.get_contacts(message[USER])
            try:
                send_message(client, response)
            except OSError:
                self.remove_client(client)
        # Add a contact.
        elif ACTION in message and message[ACTION] == ADD_CONTACT and ACCOUNT_NAME in message and USER in message \
                and self.names[message[USER]] == client:
            self.database.add_contact(message[USER], message[ACCOUNT_NAME])
            try:
                send_message(client, RESPONSE_200)
            except OSError:
                self.remove_client(client)
        # Remove a contact.
        elif ACTION in message and message[ACTION] == REMOVE_CONTACT and ACCOUNT_NAME in message and USER in message \
                and self.names[message[USER]] == client:
            self.database.remove_contact(message[USER], message[ACCOUNT_NAME])
            try:
                send_message(client, RESPONSE_200)
            except OSError:
                self.remove_client(client)
        # Known-users request.
        elif ACTION in message and message[ACTION] == USERS_REQUEST and ACCOUNT_NAME in message \
                and self.names[message[ACCOUNT_NAME]] == client:
            response = dict(RESPONSE_202)
            response[LIST_INFO] = [user[0]
                                   for user in self.database.users_list()]
            try:
                send_message(client, response)
            except OSError:
                self.remove_client(client)
        # Public-key request.
        elif ACTION in message and message[ACTION] == PUBLIC_KEY_REQUEST and ACCOUNT_NAME in message:
            response = dict(RESPONSE_511)
            response[DATA] = self.database.get_pubkey(message[ACCOUNT_NAME])
            # The key may be absent (the user has never logged in) -> 400.
            if response[DATA]:
                try:
                    send_message(client, response)
                except OSError:
                    self.remove_client(client)
            else:
                response = dict(RESPONSE_400)
                response[ERROR] = 'Нет публичного ключа для данного пользователя'
                try:
                    send_message(client, response)
                except OSError:
                    self.remove_client(client)
        # Anything else -> Bad request.
        else:
            response = dict(RESPONSE_400)
            response[ERROR] = 'Запрос некорректен.'
            try:
                send_message(client, response)
            except OSError:
                self.remove_client(client)

    def autorize_user(self, message, sock):
        """Authorise a connecting user via an HMAC challenge-response exchange."""
        logger.debug(f'Start auth process for {message[USER]}')
        if message[USER][ACCOUNT_NAME] in self.names.keys():
            # The name is already taken -> 400.
            response = dict(RESPONSE_400)
            response[ERROR] = 'Имя пользователя уже занято.'
            try:
                logger.debug(f'Username busy, sending {response}')
                send_message(sock, response)
            except OSError:
                logger.debug('OS Error')
            self.clients.remove(sock)
            sock.close()
        elif not self.database.check_user(message[USER][ACCOUNT_NAME]):
            # The user is not registered -> 400.
            response = dict(RESPONSE_400)
            response[ERROR] = 'Пользователь не зарегистрирован.'
            try:
                logger.debug(f'Unknown username, sending {response}')
                send_message(sock, response)
            except OSError:
                pass
            self.clients.remove(sock)
            sock.close()
        else:
            logger.debug('Correct username, starting passwd check.')
            # Reply 511 and run the challenge-response procedure.
            message_auth = dict(RESPONSE_511)
            # Random challenge as hex-encoded bytes.
            random_str = binascii.hexlify(os.urandom(64))
            # bytes are not JSON-serialisable (json.dumps -> TypeError),
            # so decode to str for the wire.
            message_auth[DATA] = random_str.decode('ascii')
            # Server-side digest of the stored password hash and the challenge.
            # NOTE(review): MD5 must match the client implementation.
            pwd_hash = hmac.new(
                self.database.get_hash(message[USER][ACCOUNT_NAME]),
                random_str, 'MD5')
            digest = pwd_hash.digest()
            logger.debug(f'Auth message = {message_auth}')
            try:
                # Exchange with the client.
                send_message(sock, message_auth)
                ans = get_message(sock)
            except OSError as err:
                logger.debug('Error in auth, data:', exc_info=err)
                sock.close()
                return
            client_digest = binascii.a2b_base64(ans[DATA])
            # On a correct answer, register the user as active.
            if RESPONSE in ans and ans[RESPONSE] == 511 and \
                    hmac.compare_digest(digest, client_digest):
                self.names[message[USER][ACCOUNT_NAME]] = sock
                client_ip, client_port = sock.getpeername()
                try:
                    send_message(sock, RESPONSE_200)
                except OSError:
                    # BUGFIX: remove_client() expects a socket, not a user name.
                    self.remove_client(sock)
                # Record the login; if the public key changed, store the new one.
                self.database.user_login(
                    message[USER][ACCOUNT_NAME],
                    client_ip,
                    client_port,
                    message[USER][PUBLIC_KEY])
            else:
                response = dict(RESPONSE_400)
                response[ERROR] = 'Неверный пароль.'
                try:
                    send_message(sock, response)
                except OSError:
                    pass
                self.clients.remove(sock)
                sock.close()

    def service_update_lists(self):
        '''Send service message 205 (refresh lists) to every connected client.'''
        # Iterate over a snapshot: remove_client() mutates self.names,
        # which would break iteration over the live dict.
        for client in list(self.names):
            try:
                send_message(self.names[client], RESPONSE_205)
            except OSError:
                self.remove_client(self.names[client])
|
ApplicationClientServer-server
|
/ApplicationClientServer_server-0.1-py3-none-any.whl/server/core.py
|
core.py
|
from PyQt5.QtWidgets import QDialog, QPushButton, QLineEdit, QApplication, QLabel, QMessageBox
from PyQt5.QtCore import Qt
import hashlib
import binascii
class RegisterUser(QDialog):
    """Dialog for registering a new user on the server."""

    def __init__(self, database, server):
        super().__init__()
        self.database = database
        self.server = server

        # Window configuration.
        self.setWindowTitle('Регистрация')
        self.setFixedSize(175, 183)
        self.setModal(True)
        self.setAttribute(Qt.WA_DeleteOnClose)

        # Username prompt and input field.
        self.label_username = QLabel('Введите имя пользователя:', self)
        self.label_username.setFixedSize(150, 15)
        self.label_username.move(10, 10)
        self.client_name = QLineEdit(self)
        self.client_name.move(10, 30)
        self.client_name.setFixedSize(154, 20)

        # Password prompt and input field (masked).
        self.label_passwd = QLabel('Введите пароль:', self)
        self.label_passwd.setFixedSize(150, 15)
        self.label_passwd.move(10, 55)
        self.client_passwd = QLineEdit(self)
        self.client_passwd.move(10, 75)
        self.client_passwd.setFixedSize(154, 20)
        self.client_passwd.setEchoMode(QLineEdit.Password)

        # Password confirmation (masked).
        self.label_conf = QLabel('Введите подтверждение:', self)
        self.label_conf.setFixedSize(150, 15)
        self.label_conf.move(10, 100)
        self.client_conf = QLineEdit(self)
        self.client_conf.move(10, 120)
        self.client_conf.setFixedSize(154, 20)
        self.client_conf.setEchoMode(QLineEdit.Password)

        # Action buttons.
        self.btn_ok = QPushButton('Сохранить', self)
        self.btn_ok.move(10, 150)
        self.btn_ok.clicked.connect(self.save_data)
        self.btn_cancel = QPushButton('Выход', self)
        self.btn_cancel.move(90, 150)
        self.btn_cancel.clicked.connect(self.close)

        self.messages = QMessageBox()
        self.show()

    def save_data(self):
        """
        Validate the form and, if everything is correct,
        store the new user in the database.
        """
        username = self.client_name.text()
        password = self.client_passwd.text()
        if not username:
            self.messages.critical(
                self, 'Ошибка', 'Не указано имя пользователя.')
            return
        if password != self.client_conf.text():
            self.messages.critical(
                self, 'Ошибка', 'Введённые пароли не совпадают.')
            return
        if self.database.check_user(username):
            self.messages.critical(
                self, 'Ошибка', 'Пользователь уже существует.')
            return
        # Hash the password; the lower-cased login serves as the salt.
        passwd_hash = hashlib.pbkdf2_hmac(
            'sha512',
            password.encode('utf-8'),
            username.lower().encode('utf-8'),
            10000)
        self.database.add_user(username, binascii.hexlify(passwd_hash))
        self.messages.information(
            self, 'Успех', 'Пользователь успешно зарегистрирован.')
        # Tell connected clients to refresh their user lists.
        self.server.service_update_lists()
        self.close()
if __name__ == '__main__':
    # Manual smoke test: build a storage and a server thread, then show the dialog.
    app = QApplication([])
    from database import ServerStorage
    database = ServerStorage('../server_database.db3')
    import os
    import sys
    # Make the parent directory importable so `core` can be found.
    path1 = os.path.join(os.getcwd(), '..')
    sys.path.insert(0, path1)
    from core import MessageProcessor
    server = MessageProcessor('127.0.0.1', 7777, database)
    dial = RegisterUser(database, server)
    app.exec_()
|
ApplicationClientServer-server
|
/ApplicationClientServer_server-0.1-py3-none-any.whl/server/add_user.py
|
add_user.py
|
from PyQt5.QtWidgets import QMainWindow, QAction, qApp, QApplication, QLabel, QTableView
from PyQt5.QtGui import QStandardItemModel, QStandardItem
from PyQt5.QtCore import QTimer
from server.stat_window import StatWindow
from server.config_window import ConfigWindow
from server.add_user import RegisterUser
from server.remove_user import DelUserDialog
class MainWindow(QMainWindow):
    '''Main window of the server GUI.'''

    def __init__(self, database, server, config):
        # Parent constructor.
        super().__init__()
        # Server database.
        self.database = database
        self.server_thread = server
        self.config = config
        # Exit action.
        self.exitAction = QAction('Выход', self)
        self.exitAction.setShortcut('Ctrl+Q')
        self.exitAction.triggered.connect(qApp.quit)
        # Refresh the client list.
        self.refresh_button = QAction('Обновить список', self)
        # Server settings.
        self.config_btn = QAction('Настройки сервера', self)
        # Register a user.
        self.register_btn = QAction('Регистрация пользователя', self)
        # Remove a user.
        self.remove_btn = QAction('Удаление пользователя', self)
        # Show message history.
        self.show_history_button = QAction('История клиентов', self)
        # Status bar.
        self.statusBar()
        self.statusBar().showMessage('Server Working')
        # Toolbar.
        self.toolbar = self.addToolBar('MainBar')
        self.toolbar.addAction(self.exitAction)
        self.toolbar.addAction(self.refresh_button)
        self.toolbar.addAction(self.show_history_button)
        self.toolbar.addAction(self.config_btn)
        self.toolbar.addAction(self.register_btn)
        self.toolbar.addAction(self.remove_btn)
        # Fixed window geometry (the layout uses absolute positions).
        self.setFixedSize(800, 600)
        self.setWindowTitle('Messaging Server alpha release')
        # Caption above the connected-clients table.
        self.label = QLabel('Список подключённых клиентов:', self)
        self.label.setFixedSize(240, 15)
        self.label.move(10, 25)
        # Table of connected clients.
        self.active_clients_table = QTableView(self)
        self.active_clients_table.move(10, 45)
        self.active_clients_table.setFixedSize(780, 400)
        # Timer refreshing the client list once a second.
        self.timer = QTimer()
        self.timer.timeout.connect(self.create_users_model)
        self.timer.start(1000)
        # Wire buttons to handlers.
        self.refresh_button.triggered.connect(self.create_users_model)
        self.show_history_button.triggered.connect(self.show_statistics)
        self.config_btn.triggered.connect(self.server_config)
        self.register_btn.triggered.connect(self.reg_user)
        self.remove_btn.triggered.connect(self.rem_user)
        # Finally, show the window.
        self.show()

    def create_users_model(self):
        '''Fill the table of active users.'''
        list_users = self.database.active_users_list()
        # Renamed from `list`, which shadowed the builtin.
        users_model = QStandardItemModel()
        users_model.setHorizontalHeaderLabels(
            ['Имя Клиента', 'IP Адрес', 'Порт', 'Время подключения'])
        for row in list_users:
            user, ip, port, time = row
            user = QStandardItem(user)
            user.setEditable(False)
            ip = QStandardItem(ip)
            ip.setEditable(False)
            port = QStandardItem(str(port))
            port.setEditable(False)
            # Drop microseconds from the timestamp; that precision is not needed.
            time = QStandardItem(str(time.replace(microsecond=0)))
            time.setEditable(False)
            users_model.appendRow([user, ip, port, time])
        self.active_clients_table.setModel(users_model)
        self.active_clients_table.resizeColumnsToContents()
        self.active_clients_table.resizeRowsToContents()

    def show_statistics(self):
        '''Open the client-statistics window.'''
        # Kept in a module-level name so the window is not garbage-collected.
        global stat_window
        stat_window = StatWindow(self.database)
        stat_window.show()

    def server_config(self):
        '''Open the server-settings window with the current parameters.'''
        global config_window
        config_window = ConfigWindow(self.config)

    def reg_user(self):
        '''Open the user-registration window.'''
        global reg_window
        reg_window = RegisterUser(self.database, self.server_thread)
        reg_window.show()

    def rem_user(self):
        '''Open the user-removal window.'''
        global rem_window
        rem_window = DelUserDialog(self.database, self.server_thread)
        rem_window.show()
|
ApplicationClientServer-server
|
/ApplicationClientServer_server-0.1-py3-none-any.whl/server/main_window.py
|
main_window.py
|
from sqlalchemy import create_engine, Table, Column, Integer, String, MetaData, ForeignKey, DateTime, Text
from sqlalchemy.orm import mapper, sessionmaker
import datetime
class ServerStorage:
    '''
    Wrapper class for the server database.
    Uses an SQLite database via SQLAlchemy ORM
    with the classical (imperative) mapping approach.
    '''

    class AllUsers:
        '''Mapped class for the table of all users.'''

        def __init__(self, username, passwd_hash):
            self.name = username
            self.last_login = datetime.datetime.now()
            self.passwd_hash = passwd_hash
            self.pubkey = None
            self.id = None

    class ActiveUsers:
        '''Mapped class for the table of currently active users.'''

        def __init__(self, user_id, ip_address, port, login_time):
            self.user = user_id
            self.ip_address = ip_address
            self.port = port
            self.login_time = login_time
            self.id = None

    class LoginHistory:
        '''Mapped class for the login-history table.'''

        def __init__(self, name, date, ip, port):
            self.id = None
            self.name = name
            self.date_time = date
            self.ip = ip
            self.port = port

    class UsersContacts:
        '''Mapped class for the user-contacts table.'''

        def __init__(self, user, contact):
            self.id = None
            self.user = user
            self.contact = contact

    class UsersHistory:
        """Mapped class for the message-statistics table."""

        def __init__(self, user):
            self.id = None
            self.user = user
            self.sent = 0
            self.accepted = 0

    def __init__(self, path):
        # Database engine; check_same_thread=False because the GUI
        # and the server thread share this storage object.
        self.database_engine = create_engine(
            f'sqlite:///{path}',
            echo=False,
            pool_recycle=7200,
            connect_args={
                'check_same_thread': False})
        # MetaData object holding the table definitions.
        self.metadata = MetaData()
        # Table of all users.
        users_table = Table('Users', self.metadata,
                            Column('id', Integer, primary_key=True),
                            Column('name', String, unique=True),
                            Column('last_login', DateTime),
                            Column('passwd_hash', String),
                            Column('pubkey', Text)
                            )
        # Table of active users.
        active_users_table = Table('Active_users', self.metadata,
                                   Column('id', Integer, primary_key=True),
                                   Column('user', ForeignKey('Users.id'), unique=True),
                                   Column('ip_address', String),
                                   Column('port', Integer),
                                   Column('login_time', DateTime)
                                   )
        # Login-history table.
        user_login_history = Table('Login_history', self.metadata,
                                   Column('id', Integer, primary_key=True),
                                   Column('name', ForeignKey('Users.id')),
                                   Column('date_time', DateTime),
                                   Column('ip', String),
                                   Column('port', String)
                                   )
        # User-contacts table.
        contacts = Table('Contacts', self.metadata,
                         Column('id', Integer, primary_key=True),
                         Column('user', ForeignKey('Users.id')),
                         Column('contact', ForeignKey('Users.id'))
                         )
        # Message-statistics table.
        users_history_table = Table('History', self.metadata,
                                    Column('id', Integer, primary_key=True),
                                    Column('user', ForeignKey('Users.id')),
                                    Column('sent', Integer),
                                    Column('accepted', Integer)
                                    )
        # Create the tables.
        self.metadata.create_all(self.database_engine)
        # Classical mappings.
        mapper(self.AllUsers, users_table)
        mapper(self.ActiveUsers, active_users_table)
        mapper(self.LoginHistory, user_login_history)
        mapper(self.UsersContacts, contacts)
        mapper(self.UsersHistory, users_history_table)
        # Session.
        Session = sessionmaker(bind=self.database_engine)
        self.session = Session()
        # Stale "active user" rows from a previous run must be cleared.
        self.session.query(self.ActiveUsers).delete()
        self.session.commit()

    def user_login(self, username, ip_address, port, key):
        """
        Record a user login and refresh the stored public key if it changed.

        :raises ValueError: if the user is not registered.
        """
        # Look the user up by name.
        rez = self.session.query(self.AllUsers).filter_by(name=username)
        # Known user: update last-login time and, if the client sent a new
        # key, store it.
        if rez.count():
            user = rez.first()
            user.last_login = datetime.datetime.now()
            if user.pubkey != key:
                user.pubkey = key
        # Unknown user: refuse the login.
        else:
            raise ValueError('Пользователь не зарегистрирован.')
        # Record the active session.
        new_active_user = self.ActiveUsers(
            user.id, ip_address, port, datetime.datetime.now())
        self.session.add(new_active_user)
        # And the login-history entry.
        history = self.LoginHistory(
            user.id, datetime.datetime.now(), ip_address, port)
        self.session.add(history)
        self.session.commit()

    def add_user(self, name, passwd_hash):
        """
        Register a user: store the name and password hash
        and create their statistics row.
        """
        user_row = self.AllUsers(name, passwd_hash)
        self.session.add(user_row)
        # Commit first so user_row.id is assigned.
        self.session.commit()
        history_row = self.UsersHistory(user_row.id)
        self.session.add(history_row)
        self.session.commit()

    def remove_user(self, name):
        """Delete a user and every record referencing them."""
        user = self.session.query(self.AllUsers).filter_by(name=name).first()
        # Unknown user: nothing to delete (previously crashed on user.id).
        if not user:
            return
        self.session.query(self.ActiveUsers).filter_by(user=user.id).delete()
        self.session.query(self.LoginHistory).filter_by(name=user.id).delete()
        self.session.query(self.UsersContacts).filter_by(user=user.id).delete()
        self.session.query(
            self.UsersContacts).filter_by(
            contact=user.id).delete()
        self.session.query(self.UsersHistory).filter_by(user=user.id).delete()
        self.session.query(self.AllUsers).filter_by(name=name).delete()
        self.session.commit()

    def get_hash(self, name):
        """Return the stored password hash for the named user."""
        user = self.session.query(self.AllUsers).filter_by(name=name).first()
        return user.passwd_hash

    def get_pubkey(self, name):
        """Return the stored public key for the named user."""
        user = self.session.query(self.AllUsers).filter_by(name=name).first()
        return user.pubkey

    def check_user(self, name):
        """Return True if a user with this name exists."""
        return bool(
            self.session.query(self.AllUsers).filter_by(name=name).count())

    def user_logout(self, username):
        """Record a user disconnecting: drop their active-users row."""
        user = self.session.query(
            self.AllUsers).filter_by(
            name=username).first()
        self.session.query(self.ActiveUsers).filter_by(user=user.id).delete()
        self.session.commit()

    def process_message(self, sender, recipient):
        """Record a delivered message in the statistics table."""
        # Resolve sender and recipient to their IDs.
        sender = self.session.query(
            self.AllUsers).filter_by(
            name=sender).first().id
        recipient = self.session.query(
            self.AllUsers).filter_by(
            name=recipient).first().id
        # Increment the respective counters.
        sender_row = self.session.query(
            self.UsersHistory).filter_by(
            user=sender).first()
        sender_row.sent += 1
        recipient_row = self.session.query(
            self.UsersHistory).filter_by(
            user=recipient).first()
        recipient_row.accepted += 1
        self.session.commit()

    def add_contact(self, user, contact):
        """Add a contact for a user (no-op on duplicates or unknown contact)."""
        # Resolve both users.
        user = self.session.query(self.AllUsers).filter_by(name=user).first()
        contact = self.session.query(
            self.AllUsers).filter_by(
            name=contact).first()
        # Skip duplicates and contacts that do not exist
        # (the user argument is trusted).
        if not contact or self.session.query(
                self.UsersContacts).filter_by(
                user=user.id,
                contact=contact.id).count():
            return
        contact_row = self.UsersContacts(user.id, contact.id)
        self.session.add(contact_row)
        self.session.commit()

    def remove_contact(self, user, contact):
        """Remove a contact of a user (no-op if the contact does not exist)."""
        # Resolve both users.
        user = self.session.query(self.AllUsers).filter_by(name=user).first()
        contact = self.session.query(
            self.AllUsers).filter_by(
            name=contact).first()
        # Unknown contact: nothing to remove (the user argument is trusted).
        if not contact:
            return
        self.session.query(self.UsersContacts).filter(
            self.UsersContacts.user == user.id,
            self.UsersContacts.contact == contact.id
        ).delete()
        self.session.commit()

    def users_list(self):
        """Return (name, last_login) tuples for every known user."""
        query = self.session.query(
            self.AllUsers.name,
            self.AllUsers.last_login
        )
        return query.all()

    def active_users_list(self):
        """Return (name, ip, port, login_time) tuples for active users."""
        query = self.session.query(
            self.AllUsers.name,
            self.ActiveUsers.ip_address,
            self.ActiveUsers.port,
            self.ActiveUsers.login_time
        ).join(self.AllUsers)
        return query.all()

    def login_history(self, username=None):
        """Return the login history, optionally filtered by user name."""
        query = self.session.query(
            self.AllUsers.name,
            self.LoginHistory.date_time,
            self.LoginHistory.ip,
            self.LoginHistory.port
        ).join(self.AllUsers)
        if username:
            query = query.filter(self.AllUsers.name == username)
        return query.all()

    def get_contacts(self, username):
        """Return the list of contact names for the given user."""
        user = self.session.query(self.AllUsers).filter_by(name=username).one()
        query = self.session.query(self.UsersContacts, self.AllUsers.name). \
            filter_by(user=user.id). \
            join(self.AllUsers, self.UsersContacts.contact == self.AllUsers.id)
        # Keep only the contact names.
        return [contact[1] for contact in query.all()]

    def message_history(self):
        """Return (name, last_login, sent, accepted) statistics tuples."""
        query = self.session.query(
            self.AllUsers.name,
            self.AllUsers.last_login,
            self.UsersHistory.sent,
            self.UsersHistory.accepted
        ).join(self.AllUsers)
        return query.all()
# Debugging / manual smoke test
if __name__ == '__main__':
    test_db = ServerStorage('../server_database.db3')
    # user_login() requires the user to exist and takes a public key;
    # the old call passed only 3 arguments and raised TypeError.
    for test_name in ('test1', 'test2'):
        if not test_db.check_user(test_name):
            test_db.add_user(test_name, b'test_hash')
    test_db.user_login('test1', '192.168.1.113', 8080, None)
    test_db.user_login('test2', '192.168.1.113', 8081, None)
    print(test_db.users_list())
    # print(test_db.active_users_list())
    # test_db.user_logout('McG')
    # print(test_db.login_history('re'))
    # test_db.add_contact('test2', 'test1')
    # test_db.add_contact('test1', 'test3')
    # test_db.add_contact('test1', 'test6')
    # test_db.remove_contact('test1', 'test3')
    test_db.process_message('test1', 'test2')
    print(test_db.message_history())
|
ApplicationClientServer-server
|
/ApplicationClientServer_server-0.1-py3-none-any.whl/server/database.py
|
database.py
|
import functools
import logging
import socket
import sys
sys.path.append('../')
# Pick the logger based on which side launched this module.
if sys.argv[0].find('client_dist') == -1:
    # argv[0] does not mention the client, so this is the server
    logger = logging.getLogger('server_dist')
else:
    # otherwise this is the client
    logger = logging.getLogger('client_dist')
def log(func_to_log):
    """
    Decorator that logs function calls.

    Emits a debug record with the name of the called function,
    its arguments and the module it belongs to.
    """
    # functools.wraps preserves the wrapped function's metadata
    # (__name__, __doc__, ...), which the original wrapper lost.
    @functools.wraps(func_to_log)
    def log_saver(*args, **kwargs):
        logger.debug(
            f'Была вызвана функция {func_to_log.__name__} c параметрами {args} , {kwargs}. '
            f'Вызов из модуля {func_to_log.__module__}')
        return func_to_log(*args, **kwargs)
    return log_saver
def login_required(func):
    """
    Decorator ensuring the client is authorised on the server.

    Verifies that a socket object passed among the arguments is present in
    the server's list of authorised clients (``MessageProcessor.names``).
    A presence (authorisation-request) dict is exempt. Raises ``TypeError``
    for unauthorised clients.

    :param func: handler to protect
    :return: the checking wrapper
    """
    import functools

    # Preserve the wrapped handler's metadata for logging/debugging.
    @functools.wraps(func)
    def checker(*args, **kwargs):
        # Only enforce the check for MessageProcessor methods.
        # Import here — importing at module level causes a circular import.
        from server.core import MessageProcessor
        from common.variables import ACTION, PRESENCE
        if isinstance(args[0], MessageProcessor):
            found = False
            for arg in args:
                if isinstance(arg, socket.socket):
                    # The socket must be registered in MessageProcessor.names.
                    for client in args[0].names:
                        if args[0].names[client] == arg:
                            found = True
            # A presence message starts the authorisation handshake — allow it.
            for arg in args:
                if isinstance(arg, dict):
                    if ACTION in arg and arg[ACTION] == PRESENCE:
                        found = True
            # Neither authorised nor a presence message — reject the call.
            if not found:
                raise TypeError
        return func(*args, **kwargs)
    return checker
|
ApplicationClientServer-server
|
/ApplicationClientServer_server-0.1-py3-none-any.whl/common/decos.py
|
decos.py
|
import dis
from pprint import pprint
# Метакласс для проверки соответствия сервера:
class ServerMaker(type):
def __init__(cls, clsname, bases, clsdict):
"""
:param clsname: - экземпляр метакласса - Server
:param bases: кортеж базовых классов - ()
:param clsdict: словарь атрибутов и методов экземпляра метакласса
"""
# Список методов, которые используются в функциях класса:
methods = [] # с помощью 'LOAD_GLOBAL'
methods_2 = [] # методы, обёрнутые декораторами попадают не в 'LOAD_GLOBAL', а в 'LOAD_METHOD'
# Атрибуты, используемые в функциях классов
attrs = []
for func in clsdict:
try:
ret = dis.get_instructions(clsdict[func])
except TypeError:
pass
else:
# Если функция разбираем код, получая используемые методы и атрибуты.
for i in ret:
print(i)
if i.opname == 'LOAD_GLOBAL':
if i.argval not in methods:
# заполняем список методами, использующимися в функциях класса
methods.append(i.argval)
elif i.opname == 'LOAD_METHOD':
if i.argval not in methods_2:
methods_2.append(i.argval)
elif i.opname == 'LOAD_ATTR':
if i.argval not in attrs:
# заполняем список атрибутами, использующимися в функциях класса
attrs.append(i.argval)
print(20 * '-', 'methods', 20 * '-')
pprint(methods)
print(20 * '-', 'methods_2', 20 * '-')
pprint(methods_2)
print(20 * '-', 'attrs', 20 * '-')
pprint(attrs)
print(50 * '-')
if 'connect' in methods:
raise TypeError('Использование метода connect недопустимо в серверном классе')
if not ('SOCK_STREAM' in attrs and 'AF_INET' in attrs):
raise TypeError('Некорректная инициализация сокета.')
# Вызываем конструктор предка
super().__init__(clsname, bases, clsdict)
# Метакласс для проверки корректности клиентов:
class ClientMaker(type):
def __init__(cls, clsname, bases, clsdict):
# Список методов, которые используются в функциях класса:
methods = []
for func in clsdict:
try:
ret = dis.get_instructions(clsdict[func])
# Если не функция то ловим исключение
except TypeError:
pass
else:
#Если функция разбираем код, получая используемые методы.
for i in ret:
if i.opname == 'LOAD_GLOBAL':
if i.argval not in methods:
methods.append(i.argval)
# Если обнаружено использование недопустимого метода accept, listen, socket бросаем исключение:
for command in ('accept', 'listen', 'socket'):
if command in methods:
raise TypeError('В классе обнаружено использование запрещённого метода')
# Вызов get_message или send_message из utils считаем корректным использованием сокетов
if 'get_message' in methods or 'send_message' in methods:
pass
else:
raise TypeError('Отсутствуют вызовы функций, работающих с сокетами.')
super().__init__(clsname, bases, clsdict)
|
ApplicationClientServer-server
|
/ApplicationClientServer_server-0.1-py3-none-any.whl/common/metaclasses.py
|
metaclasses.py
|
def get_key(d, key, delimiter='/'):
    """Look up a nested value by a delimiter-joined path.

    An empty *key* returns *d* itself.

    >>> get_key(dict(a=dict(b=2)), "a/b")
    2

    :param d: nested mapping to read from
    :param key: path such as ``"a/b"``
    :param delimiter: path separator, ``"/"`` by default
    :return: the value stored at *key*
    """
    if key == "":
        return d
    node = d
    for segment in key.split(delimiter):
        node = node[segment]
    return node
def set_key(d, key, value, delimiter='/'):
    """Assign *value* at a delimiter-joined path inside *d* (in place).

    >>> set_key(dict(a=dict(b=2)), "a/b", 3)
    {'a': {'b': 3}}

    :param d: nested mapping to mutate
    :param key: path such as ``"a/b"``
    :param value: value to store at the path's leaf
    :param delimiter: path separator, ``"/"`` by default
    :return: the (mutated) top-level mapping *d*
    """
    *parent_parts, leaf = key.split(delimiter)
    node = d
    for part in parent_parts:
        node = node[part]
    node[leaf] = value
    return d
def join_prefix(x, y, delimiter="/"):
    """Join two path prefixes with *delimiter*, avoiding doubled separators.

    :param x: left prefix; an empty string yields *y* unchanged
    :param y: right component
    :param delimiter: separator string
    :return: the joined prefix
    """
    if x == "":
        return y
    left = x.rstrip(delimiter)
    return f"{left}{delimiter}{y}"
def collect_keys(d):
    """Flatten a nested mapping into the list of its leaf paths (breadth-first).

    Only string leaves are collected; nested dicts are descended into.

    >>> collect_keys(dict(a=dict(b="2")))
    ['a/b']
    >>> collect_keys(dict(a=dict(b="2", c=dict(d="3")), t="1"))
    ['t', 'a/b', 'a/c/d']

    :param d: nested mapping of str -> (str | dict)
    :return: list of delimiter-joined leaf key paths
    """
    leaves = []
    frontier = [""]  # breadth-first: start at the (empty) root prefix
    while frontier:
        next_frontier = []
        for prefix in frontier:
            for name, child in get_key(d, prefix).items():
                if isinstance(child, dict):
                    next_frontier.append(join_prefix(prefix, name))
                elif isinstance(child, str):
                    leaves.append(join_prefix(prefix, name))
        frontier = next_frontier
    return leaves
def apply(schema, environ):
    """Walk *schema*, replacing each leaf name with its value from *environ*.

    Leaves of *schema* are treated as variable names; the result has the
    same shape with each leaf replaced by ``environ[leaf]``.

    >>> apply(dict(a='b'), environ=dict(b='vb'))
    {'a': 'vb'}
    >>> apply(dict(a=dict(b='c')), environ=dict(c='vc'))
    {'a': {'b': 'vc'}}

    :param schema: nested mapping whose string leaves name environment keys
    :param environ: mapping of variable name -> value
    :return: a new top-level dict
    :raises KeyError: when a leaf names a variable missing from *environ*
    """
    # NOTE(review): dict(schema) is a *shallow* copy — nested dicts are shared
    # with *schema* and mutated in place by set_key; confirm this is intended.
    result = dict(schema)
    # Bug fix: the doctests referenced a nonexistent `apply_env`; they now
    # call `apply` so `doctest` actually exercises this function.
    for config_key in collect_keys(schema):
        var_name = get_key(schema, config_key)
        result = set_key(result, config_key, environ[var_name])
    return result
|
ApplyEnv
|
/ApplyEnv-1.0.57.tar.gz/ApplyEnv-1.0.57/applyenv/applyenv.py
|
applyenv.py
|
from sys import argv
import os
from platform import platform
def main():
    """Command-line entry point: wrap a binary into a macOS ``.app`` bundle.

    Reads the binary path from ``argv[1]`` and optional flags
    (``-n/--name``, ``-i/--icon``, ``-a/--author``, ``-v/--version``),
    then creates ``<name>.app/Contents/{MacOS,Resources}`` with the binary,
    optional icon, and a generated ``Info.plist``.
    """

    # Help Message
    def help_str():
        """Print the usage/help text."""
        print("Create an app from an Binary!\n\nUsage: appmaker [script] [--options]"
              "\n\nOptions:\n"
              " -n or --name for the name of the app\n"
              " -i or --icon for an icon file (must be .icns)\n"
              " -a or --author for an author\n"
              " -v or --version for a version number (must be a string)\n\n"
              "Example: appmaker ./main.py --name \"My App\" --icon \"./assets/icon.icns\" --author \"Pixel Master\" "
              "--version \"1.0\"\n\n "
              "appmaker -h or appmaker --help for this page")

    def get_option(short, long, default=None):
        """Return the argv value following *short*/*long*, or *default*."""
        for flag in (short, long):
            if flag in argv:
                return argv[argv.index(flag) + 1]
        return default

    # Function for building the .app bundle
    def build_app():
        """Assemble the .app directory structure in the working directory."""
        # Name defaults to the binary's own filename.
        pure_name = get_option("-n", "--name", os.path.basename(argv[1]))
        name = f"{pure_name}.app"
        # Icon is optional, but if given it must exist.
        icon = get_option("-i", "--icon")
        if icon is not None and not os.path.exists(icon):
            raise OSError("Item path is wrong")
        # Author defaults to the current user's home-directory name.
        author = get_option("-a", "--author", os.path.basename(os.path.expanduser("~")))
        # Version defaults to "1.0".
        version = get_option("-v", "--version", "1.0")
        # Creating dirs (makedirs creates the intermediate Contents dir too).
        maindir = os.path.join(name, "Contents")
        macosdir = os.path.join(maindir, "MacOS")
        ressourcedir = os.path.join(maindir, "Resources")
        os.makedirs(macosdir, exist_ok=True)
        os.makedirs(ressourcedir, exist_ok=True)
        # Copying Binary
        with open(argv[1], "rb") as binary_file:
            binary = binary_file.read()
        binary_dest = os.path.join(macosdir, pure_name)
        with open(binary_dest, "wb") as binary_copy:
            binary_copy.write(binary)
        del binary
        # Mark the binary executable. os.chmod replaces the previous
        # `os.system("chmod +x …")`, which broke on paths with quotes and
        # spawned a shell needlessly.
        os.chmod(binary_dest, 0o755)
        # Copying Icon
        if icon is not None:
            with open(icon, "rb") as icon_file:
                icon_binary = icon_file.read()
            with open(os.path.join(ressourcedir, "Icon.icns"), "wb") as icon_copy:
                icon_copy.write(icon_binary)
        # Creating Info.plist
        with open(os.path.join(maindir, "Info.plist"), "w") as infofile:
            infofile.write(f"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
                           f"\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" "
                           f"\"http://www.apple.com/DTDs/PropertyList-1.0.dtd\"> "
                           f"\n<plist version=\"1.0\">"
                           f"\n<dict>"
                           f"\n <key>CFBundleAllowMixedLocalizations</key>"
                           f"\n <true/>"
                           f"\n <key>CFBundleExecutable</key>"
                           f"\n <string>{pure_name}</string>"
                           f"\n <key>CFBundleIconFile</key>"
                           f"\n <string>Icon</string>"
                           f"\n <key>CFBundleIconName</key>"
                           f"\n <string>Icon</string>"
                           f"\n <key>CFBundleShortVersionString</key>"
                           f"\n <string>{version}</string>"
                           f"\n <key>NSHumanReadableCopyright</key>"
                           f"\n <string>{author}</string>"
                           f"\n</dict>"
                           f"\n</plist>")

    # Help Message
    if "-h" in argv or "--help" in argv:
        help_str()
    # create .app — only on macOS. Bug fix: the original tested
    # `platform() != "darwin"`, which is ALWAYS true because
    # platform.platform() returns strings like "macOS-12.4-…" or
    # "Darwin-21.5.0-…", never the bare "darwin" — so the guard never
    # restricted anything and the unsupported-OS branch was unreachable.
    elif platform().lower().startswith(("macos", "darwin")):
        try:
            if os.path.exists(argv[1]):
                build_app()
            else:
                print(f"No Binary found under: {argv[1]}\n\nBinary must be first argument!")
        except IndexError:
            help_str()
            print("\n\nYou need to include a binary!")
    else:
        print("appmaker only supports macOS at the moment!\nFeel free to contribute!")
|
Appmaker
|
/Appmaker-0.2-py3-none-any.whl/appmaker/appmaker.py
|
appmaker.py
|
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1
through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or
are under common control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by
contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii)
beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source
code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including
but not limited to compiled object code, generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as
indicated by a copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work
and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an
original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or
additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the
Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright
owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including
but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems
that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "
Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been
received by Licensor and subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to
You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce,
prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such
Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a
perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise
transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are
necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (
including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within
the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this
License for that Work shall terminate as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with
or without modifications, and in Source or Object form, provided that You meet the following conditions:
(a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent,
trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to
any part of the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You
distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those
notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a
NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided
along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such
third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not
modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be
construed as modifying the License.
You may add Your own copyright statement to Your modifications and may provide additional or different license terms
and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a
whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in
this License.
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for
inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any
additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any
separate license agreement you may have executed with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product
names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and
reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and
each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT,
MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness
of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this
License.
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or
otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing,
shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or
consequential damages of any character arising as a result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or
any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such
damages.
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose
to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or
rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and
on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and
hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2018 Christian Heider Nielsen
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "
AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
language governing permissions and limitations under the License.
|
Apppath
|
/Apppath-1.0.3.tar.gz/Apppath-1.0.3/LICENSE.md
|
LICENSE.md
|
# Security Policy
## Supported Versions
Use this section to tell people about which versions of your project are currently being supported with security
updates.
| Version | Supported |
| ------- | ------------------ |
| 5.1.x | :white_check_mark: |
| 5.0.x | :x: |
| 4.0.x | :white_check_mark: |
| < 4.0 | :x: |
## Reporting a Vulnerability
Use this section to tell people how to report a vulnerability.
Tell them where to go, how often they can expect to get an update on a reported vulnerability, what to expect if the
vulnerability is accepted or declined, etc.
|
Apppath
|
/Apppath-1.0.3.tar.gz/Apppath-1.0.3/SECURITY.md
|
SECURITY.md
|
<!---->
<p align="center">
<img src=".github/images/apppath.svg" alt='AppPath' />
</p>
<h1 align="center">AppPath</h1>
<!--# AppPath-->
| [](https://pything.github.io/apppath/) | [](https://travis-ci.com/pything/apppath) | [](https://github.com/ambv/black) | [](https://coveralls.io/github/pything/apppath?branch=master) | [](https://lgtm.com/projects/g/pything/apppath/alerts/) | [](https://lgtm.com/projects/g/pything/apppath/context:python) |
|-----------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Workflows |
|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|  |
|  |
|  |
___
> Clutter-free app data

___
A class and a set of functions for providing for system-consensual path for apps to store data, logs, cache...
|
Apppath
|
/Apppath-1.0.3.tar.gz/Apppath-1.0.3/README.md
|
README.md
|
__author__ = "Christian Heider Nielsen"
__doc__ = ""
__all__ = ["get_win_folder"]
from typing import Any
from warg.os_utilities.platform_selection import SYSTEM_, is_py3
if is_py3():
unicode = str
def _get_win_folder_from_registry(csidl_name: Any) -> Any:
    """Resolve a CSIDL folder by reading the Windows registry directly.

    This is a fallback technique at best — the registry is not guaranteed
    to give the correct answer for every CSIDL_* name.
    """
    if is_py3():
        import winreg as _winreg
    else:
        import _winreg
    # Map the CSIDL constant name onto its "Shell Folders" value name.
    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]
    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders",
    )
    folder_path, _value_type = _winreg.QueryValueEx(key, shell_folder_name)
    return folder_path
'''
import ctypes
from ctypes.wintypes import HWND, UINT, WPARAM, LPARAM, LPVOID
LRESULT = LPARAM # synonymous
import os
import sys
try:
import winreg
unicode = str
except ImportError:
import _winreg as winreg # Python 2.x
class Environment(object):
path = r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'
hklm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
key = winreg.OpenKey(hklm, path, 0, winreg.KEY_READ | winreg.KEY_WRITE)
SendMessage = ctypes.windll.user32.SendMessageW
SendMessage.argtypes = HWND, UINT, WPARAM, LPVOID
SendMessage.restype = LRESULT
HWND_BROADCAST = 0xFFFF
WM_SETTINGCHANGE = 0x1A
NO_DEFAULT_PROVIDED = object()
def get(self, name, default=NO_DEFAULT_PROVIDED):
try:
value = winreg.QueryValueEx(self.key, name)[0]
except WindowsError:
if default is self.NO_DEFAULT_PROVIDED:
raise ValueError("No such registry key", name)
value = default
return value
def set(self, name, value):
if value:
winreg.SetValueEx(self.key, name, 0, winreg.REG_EXPAND_SZ, value)
else:
winreg.DeleteValue(self.key, name)
self.notify()
def notify(self):
self.SendMessage(self.HWND_BROADCAST, self.WM_SETTINGCHANGE, 0, u'Environment')
Environment = Environment() # singletion - create instance
PATH_VAR = 'PATH'
def append_path_envvar(addpath):
def canonical(path):
path = unicode(path.upper().rstrip(os.sep))
return winreg.ExpandEnvironmentStrings(path) # Requires Python 2.6+
canpath = canonical(addpath)
curpath = Environment.get(PATH_VAR, '')
if not any(canpath == subpath
for subpath in canonical(curpath).split(os.pathsep)):
Environment.set(PATH_VAR, os.pathsep.join((curpath, addpath)))
def remove_envvar_path(folder):
""" Remove *all* paths in PATH_VAR that contain the folder path. """
curpath = Environment.get(PATH_VAR, '')
folder = folder.upper()
keepers = [subpath for subpath in curpath.split(os.pathsep)
if folder not in subpath.upper()]
Environment.set(PATH_VAR, os.pathsep.join(keepers))
'''
"""
import _winreg as reg
import win32gui
import win32con
# read the value
key = reg.OpenKey(reg.HKEY_CURRENT_USER, 'Environment', 0, reg.KEY_ALL_ACCESS)
# use this if you need to modify the system variable and if you have admin privileges
#key = reg.OpenKey(reg.HKEY_LOCAL_MACHINE, r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment', 0, reg.KEY_ALL_ACCESS)
try
value, _ = reg.QueryValueEx(key, 'PATH')
except WindowsError:
# in case the PATH variable is undefined
value = ''
# modify it
value = ';'.join([s for s in value.split(';') if not r'\myprogram' in s])
# write it back
reg.SetValueEx(key, 'PATH', 0, reg.REG_EXPAND_SZ, value)
reg.CloseKey(key)
# notify the system about the changes
win32gui.SendMessage(win32con.HWND_BROADCAST, win32con.WM_SETTINGCHANGE, 0, 'Environment')
"""
def _get_win_folder_with_pywin32(csidl_name: Any) -> Any:
    """Resolve a CSIDL folder path via the pywin32 shell bindings."""
    from win32com.shell import shellcon, shell

    folder = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path, since SHGetFolderPath does not
    # return unicode strings when the path contains unicode data.
    try:
        folder = unicode(folder)
        # Downgrade to the short (8.3) path name when high-bit characters are
        # present. See <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        if any(ord(ch) > 255 for ch in folder):
            try:
                import win32api

                folder = win32api.GetShortPathName(folder)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return folder
def _get_win_folder_with_ctypes(csidl_name: Any) -> Any:
    """Resolve a CSIDL folder path via ctypes and shell32."""
    from ctypes import windll, create_unicode_buffer

    # CSIDL name -> numeric constant expected by SHGetFolderPathW.
    csidl_codes = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }
    path_buf = create_unicode_buffer(1024)
    windll.shell32.SHGetFolderPathW(None, csidl_codes[csidl_name], None, 0, path_buf)
    # Downgrade to the short (8.3) path name when high-bit characters are
    # present. See <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    if any(ord(ch) > 255 for ch in path_buf):
        short_buf = create_unicode_buffer(1024)
        if windll.kernel32.GetShortPathNameW(path_buf.value, short_buf, 1024):
            path_buf = short_buf
    return path_buf.value
def _get_win_folder_with_jna(csidl_name: Any) -> Any:
    """Resolve a CSIDL folder path via JNA (Java Native Access).

    Fallback for Jython on Windows; mirrors the other ``_get_win_folder_*``
    implementations but goes through the ``com.sun.jna`` Java bindings.
    """
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32
    # MAX_PATH chars, two bytes each ("c" array of bytes).
    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros("c", buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(
        None,
        getattr(win32.ShlObj, csidl_name),
        None,
        win32.ShlObj.SHGFP_TYPE_CURRENT,
        buf,
    )
    # Strip trailing NULs left in the fixed-size buffer.
    ddir = jna.Native.toString(buf.tostring()).rstrip("\0")
    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in ddir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros("c", buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(ddir, buf, buf_size):
            ddir = jna.Native.toString(buf.tostring()).rstrip("\0")
    return ddir
# Pick the best available implementation for the current platform by trying
# imports in order of preference: pywin32 -> ctypes -> JNA -> raw registry.
# On non-Windows systems this stays None.
get_win_folder = None
if SYSTEM_ == "win32":  # IMPORT TESTS
    try:
        from win32com import shell
        get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                # JNA is only available under Jython.
                from com.sun import jna
                get_win_folder = _get_win_folder_with_jna
            except ImportError:
                get_win_folder = _get_win_folder_from_registry
|
Apppath
|
/Apppath-1.0.3.tar.gz/Apppath-1.0.3/apppath/windows_path_utilities.py
|
windows_path_utilities.py
|
import datetime
import os
from importlib.metadata import PackageNotFoundError
from warnings import warn
from typing import Any
from importlib import resources
from warg import package_is_editable
__project__ = "Apppath"
__author__ = "Christian Heider Nielsen"
__version__ = "1.0.3"
__doc__ = r"""
Created on 27/04/2019
A class and a set of functions for providing for system-consensual path for apps to store data, logs, cache...
@author: cnheider
"""
__all__ = [
"PROJECT_APP_PATH",
"PROJECT_NAME",
"PROJECT_VERSION",
"get_version",
"PROJECT_ORGANISATION",
"PROJECT_AUTHOR",
"PROJECT_YEAR",
"AppPath",
"AppPathSubDirEnum",
"open_app_path"
# "INCLUDE_PROJECT_READMES",
# "PACKAGE_DATA_PATH"
]
from .app_path import *
from .system_open_path_utilities import *
PROJECT_NAME = __project__.lower().strip().replace(" ", "_")
PROJECT_VERSION = __version__
PROJECT_YEAR = 2018
PROJECT_AUTHOR = __author__.lower().strip().replace(" ", "_")
PROJECT_ORGANISATION = "Pything"
PACKAGE_DATA_PATH = resources.files(PROJECT_NAME) / "data"
try:
DEVELOP = package_is_editable(PROJECT_NAME)
except PackageNotFoundError as e:
DEVELOP = True
def get_version(append_time: Any = DEVELOP) -> str:
    """Return the package version, optionally suffixed with a UTC timestamp.

    :param append_time: when truthy (defaults to whether this is an
        editable/develop install), append a ``YYYYmmddHHMMSS`` UTC segment
        so repeated same-day publications get distinct versions.
    :return: version string, e.g. ``"1.0.3"`` or ``"1.0.3.20240101120000"``
    """
    version = __version__
    if not version:
        version = os.getenv("VERSION", "0.0.0")
    if append_time:
        # datetime.utcnow() is deprecated since Python 3.12; an aware UTC
        # datetime produces the identical strftime output.
        now = datetime.datetime.now(datetime.timezone.utc)
        date_version = now.strftime("%Y%m%d%H%M%S")
        # date_version = time.time()
        if version:
            # Most git tags are prefixed with 'v' (example: v1.2.3); that is
            # never desirable for artifact repositories, so strip a leading
            # 'v' if present.
            version = version[1:] if isinstance(version, str) and version.startswith("v") else version
        else:
            # Fall back to a pure datetime version. PyPI forbids ':' in
            # version strings, and the time component allows multiple
            # publications to master in one day; the 'basic' ISO8601 form
            # (no dashes/colons) avoids the forbidden characters.
            warn(f"Environment variable VERSION is not set, only using datetime: {date_version}")
        version = f"{version}.{date_version}"
    return version
# Fallback: derive a timestamped version when the hard-coded one is absent.
if __version__ is None:
    __version__ = get_version(append_time=True)
# Numeric tuple form, e.g. (1, 0, 3); assumes all dot-separated segments are
# integers — TODO confirm timestamped versions never reach this line.
__version_info__ = tuple(int(segment) for segment in __version__.split("."))
# Per-user application-directory handle for this package.
PROJECT_APP_PATH = AppPath(app_name=PROJECT_NAME, app_author=PROJECT_AUTHOR)
|
Apppath
|
/Apppath-1.0.3.tar.gz/Apppath-1.0.3/apppath/__init__.py
|
__init__.py
|
__author__ = "Christian Heider Nielsen"
__doc__ = "Application data directories extension for pathlib"
__all__ = ["AppPath"]
import os
import shutil
from pathlib import Path
from apppath.windows_path_utilities import get_win_folder
from warg import ensure_existence, sanitise_path
from warg.os_utilities.platform_selection import SystemEnum, get_system
class AppPath(object):
    r"""
    AppPath class for easing cross-platform access to proper app data directories.

    Abstracts the per-platform conventions for where an application should keep
    its files (Windows ``AppData``, macOS ``~/Library``, XDG base directories on
    Linux).  Each category is exposed as a property; by default the directory is
    created on first access (see ``ensure_existence_on_access``).

    Typical locations per category:

        user data:    Mac  ~/Library/Application Support/<AppName>
                      Unix $XDG_DATA_HOME or ~/.local/share/<AppName>
                      Win  C:\Users\<username>\AppData\{Local,Roaming}\<AppAuthor>\<AppName>
        site data:    Unix /usr/local/share/<AppName> or /usr/share/<AppName>
                      Win  C:\ProgramData\<AppAuthor>\<AppName>
        user config:  Mac  ~/Library/Preferences/<AppName>
                      Unix $XDG_CONFIG_HOME or ~/.config/<AppName>
                      Win  same as user data
        site config:  Unix $XDG_CONFIG_DIRS[i]/<AppName> or /etc/xdg/<AppName>
                      Win  same as site data
        user cache:   Mac  ~/Library/Caches/<AppName>
                      Unix $XDG_CACHE_HOME or ~/.cache/<AppName>
                      Win  C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
        user state:   Mac/Win same as user data
                      Unix $XDG_STATE_HOME or ~/.local/state/<AppName>
        user log:     Mac  ~/Library/Logs/<AppName>
                      Unix <user cache>/log
                      Win  <user data>\Logs
    """

    def __init__(
        self,
        app_name: str,
        app_author: str = None,
        app_version: str = None,
        roaming: bool = False,
        multi_path: bool = False,
        ensure_existence_on_access: bool = True,
        normalise_path: bool = True,
    ):
        r"""
        :param app_name: name of the application (mandatory, str).
        :param app_author: author/owning-company path component (used on
            Windows); must be a str or None.
        :param app_version: optional version path segment appended to every
            directory; int and float are converted to str.
        :param roaming: can be set True to use the Windows roaming appdata
            directory, so for users on a Windows network set up for roaming
            profiles this user data will be sync'd on login.  See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.
        :param multi_path: only applicable to *nix; indicates that the entire
            list of site dirs should be returned (os.pathsep-joined).  By
            default only the first item from the XDG list is returned.
        :param ensure_existence_on_access: create the directory whenever a
            path property is accessed.
        :param normalise_path: lower-case the components and replace spaces
            with underscores."""
        assert isinstance(app_name, str), f"app_name must be a str, not {type(app_name)}"
        self._sanitise_path = normalise_path
        if normalise_path:
            app_name = app_name.strip().lower().replace(" ", "_")
        self._app_name = app_name

        if app_author:
            if isinstance(app_author, str):
                if normalise_path:
                    app_author = app_author.strip().lower().replace(" ", "_")
            else:
                # BUG FIX: truthy non-str authors (e.g. an int) were previously
                # accepted silently; fail fast with the same message the falsy
                # branch uses.
                raise TypeError(
                    f"{type(app_author)} is not accepted as app_author, must be of type str or None"
                )
        else:
            assert (
                app_author is None
            ), f"{type(app_author)} is not accepted as app_author, must be of type str or None"
        self._app_author = app_author

        if app_version:
            if isinstance(app_version, (int, float)):
                app_version = str(app_version)
            if isinstance(app_version, str):
                if normalise_path:
                    app_version = app_version.strip().lower().replace(" ", "_")
            else:
                raise TypeError(
                    f"app_version was of type {type(app_version)}, only int, float and str is supported"
                )
        self._app_version = app_version

        self._roaming = roaming
        self._multi_path = multi_path
        self._ensure_existence = ensure_existence_on_access

    def __divmod__(self, other):
        # Delegate so that both divmod() and '/' fail with the same guidance.
        return self.__truediv__(other)

    def __truediv__(self, other):
        # An AppPath is a *factory* of paths, not a path itself; refuse the
        # pathlib-style '/' operator with a pointer to the right API.
        raise ArithmeticError(
            "The AppPath class itself is not a Path, you should use one of its path properties ("
            'e.g. ".user_data" or ".user_config")'
        )

    def _ensured(self, path_: Path) -> Path:
        """Apply the configured sanitisation/creation policy to a resolved path."""
        return ensure_existence(
            path_,
            sanitisation_func=sanitise_path if self._sanitise_path else None,
            enabled=self._ensure_existence,
        )

    @property
    def user_data(self) -> Path:
        """User-specific data directory for this application.

        :return: resolved (and, by policy, created) Path."""
        return self._ensured(
            self._user_data_path(
                self._app_name,
                self._app_author,
                version=self._app_version,
                roaming=self._roaming,
            )
        )

    @property
    def site_data(self) -> Path:
        """Shared (all-users) data directory for this application.

        :return: resolved (and, by policy, created) Path."""
        return self._ensured(
            self._site_data_path(
                self._app_name,
                self._app_author,
                version=self._app_version,
                multi_path=self._multi_path,
            )
        )

    @property
    def user_config(self) -> Path:
        """User-specific config directory for this application.

        :return: resolved (and, by policy, created) Path."""
        return self._ensured(
            self._user_config_path(
                self._app_name,
                self._app_author,
                version=self._app_version,
                roaming=self._roaming,
            )
        )

    @property
    def site_config(self) -> Path:
        """Shared (all-users) config directory for this application.

        :return: resolved (and, by policy, created) Path."""
        return self._ensured(
            self._site_config_path(
                self._app_name,
                self._app_author,
                version=self._app_version,
                multi_path=self._multi_path,
            )
        )

    def _linux_root_dir(self, base: str) -> Path:
        """Return <base>/<app_name>; these system roots only exist on Linux.

        :raises SystemError: on any non-Linux platform."""
        if get_system() != SystemEnum.linux:
            raise SystemError(f"Invalid system: {get_system()}\n Expected {SystemEnum.linux}")
        return ensure_existence(Path(base) / self._app_name, enabled=self._ensure_existence)

    @property
    def root_cache(self) -> Path:
        """System-wide cache dir (/var/cache/<app_name>); Linux only."""
        return self._linux_root_dir("/var/cache")

    @property
    def root_config(self) -> Path:
        """System-wide config dir (/etc/<app_name>); Linux only."""
        return self._linux_root_dir("/etc")

    @property
    def root_log(self) -> Path:
        """System-wide log dir (/var/log/<app_name>); Linux only."""
        return self._linux_root_dir("/var/log")

    @property
    def root_state(self) -> Path:
        """System-wide state dir (/var/lib/<app_name>); Linux only."""
        return self._linux_root_dir("/var/lib")

    @property
    def root_run(self) -> Path:
        """System-wide runtime dir (/run/<app_name>); Linux only."""
        return self._linux_root_dir("/run")

    @property
    def root_tmp(self) -> Path:
        """System-wide short-lived temp dir (/tmp/<app_name>); Linux only."""
        return self._linux_root_dir("/tmp")

    @property
    def root_long_tmp(self) -> Path:
        """System-wide long-lived temp dir (/var/tmp/<app_name>); Linux only."""
        return self._linux_root_dir("/var/tmp")

    @property
    def site_cache(self) -> Path:
        """Shared cache path.

        TODO: Nonsense, is same as user_cache"""
        return self.user_cache

    @property
    def site_log(self) -> Path:
        """Shared log path.

        TODO: Nonsense, is same as user_log"""
        return self.user_log

    @property
    def user_cache(self) -> Path:
        """User-specific cache directory for this application.

        :return: resolved (and, by policy, created) Path."""
        return self._ensured(
            self._user_cache_path(self._app_name, self._app_author, version=self._app_version)
        )

    @property
    def user_state(self) -> Path:
        """User-specific state directory for this application.

        :return: resolved (and, by policy, created) Path."""
        return self._ensured(
            self._user_state_path(self._app_name, self._app_author, version=self._app_version)
        )

    @property
    def user_log(self) -> Path:
        """User-specific log directory for this application.

        :return: resolved (and, by policy, created) Path."""
        return self._ensured(
            self._user_log_path(self._app_name, self._app_author, version=self._app_version)
        )

    @staticmethod
    def _user_data_path(
        app_name: str = None,
        app_author: str = None,
        version: str = None,
        roaming: bool = False,
    ) -> Path:
        r"""Return the full path to the user-specific data dir.

        "app_name": application name; if None, just the system directory.
        "app_author": Windows author component; falls back to app_name.
        "version": optional version path element, applied only when app_name
            is present.  Typically "<major>.<minor>".
        "roaming": use the Windows roaming appdata dir (sync'd on login).

        Typical user data directories:
            Mac OS X:          ~/Library/Application Support/<AppName>
            Unix:              $XDG_DATA_HOME or ~/.local/share/<AppName>
            Win (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
            Win (roaming):     C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
        References: MSDN app-data guidance, Apple BPFileSystem docs, and the
        freedesktop.org XDG Base Directory Specification."""
        if get_system() == SystemEnum.windows:
            if app_author is None:
                app_author = app_name
            const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
            path_ = Path(os.path.normpath(get_win_folder(const)))
        elif get_system() == SystemEnum.mac:
            path_ = Path.home() / "Library" / "Application Support"
        elif get_system() == SystemEnum.linux:
            path_ = Path(os.getenv("XDG_DATA_HOME", Path.home() / ".local" / "share"))
        else:
            raise SystemError(f"Invalid system {get_system()}")

        if app_author:
            path_ /= app_author
        if app_name:
            path_ /= app_name
        if app_name and version:
            path_ /= version
        return path_

    @staticmethod
    def _site_data_path(
        app_name: str = None,
        app_author: str = None,
        version: str = None,
        multi_path: bool = False,
    ) -> Path:
        r"""Return the full path to the user-shared data dir.

        "multi_path" (*nix only): return the entire os.pathsep-joined list of
        data dirs; by default only the first $XDG_DATA_DIRS entry is used
        (or '/usr/local/share/<AppName>' if XDG_DATA_DIRS is not set).

        Typical site data directories:
            Unix:  /usr/local/share/<AppName> or /usr/share/<AppName>
            Win 7: C:\ProgramData\<AppAuthor>\<AppName>  # hidden but writeable
        WARNING: do not use this on Windows Vista — "C:\ProgramData" is a
        hidden *system* directory there."""
        if get_system() == SystemEnum.windows:
            if app_author is None:
                app_author = app_name
            path = Path(os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA")))
        elif get_system() == SystemEnum.mac:
            # NOTE(review): docstring/appdirs convention is the system-wide
            # "/Library/Application Support"; this resolves relative to the
            # user's home instead — confirm which is intended.
            path = Path.home() / "Library" / "Application Support"
        elif get_system() == SystemEnum.linux:
            # XDG default for $XDG_DATA_DIRS; only the first entry unless multi_path.
            raw = os.getenv("XDG_DATA_DIRS", os.pathsep.join(["/usr/local/share", "/usr/share"]))
            path_list = [os.path.expanduser(x.rstrip(os.sep)) for x in raw.split(os.pathsep)]
            if app_name:
                if version:
                    app_name = Path(app_name) / version
                path_list = [Path(x) / app_name for x in path_list]
            if multi_path:
                # NOTE: returns an os.pathsep-joined *str*, not a Path.
                return os.pathsep.join(str(a) for a in path_list)
            return Path(path_list[0])
        else:
            raise SystemError(f"Invalid system {get_system()}")

        if app_author:
            path /= app_author
        if app_name:
            path /= app_name
        if app_name and version:
            path /= version
        return path

    @staticmethod
    def _user_config_path(
        app_name: str = None,
        app_author: str = None,
        version: str = None,
        roaming: bool = False,
    ) -> Path:
        r"""Return the full path to the user-specific config dir.

        Parameters as in :meth:`_user_data_path`.

        Typical user config directories:
            Mac OS X: ~/Library/Preferences/<AppName>
            Unix:     $XDG_CONFIG_HOME or ~/.config/<AppName>
            Win *:    same as user_data_dir"""
        if get_system() == SystemEnum.windows:
            if app_author is None:
                app_author = app_name
            # Version is appended once, below, so it is not passed through here.
            path = AppPath._user_data_path(app_name, app_author, None, roaming)
        elif get_system() == SystemEnum.mac:
            path = Path.home() / "Library" / "Preferences"
            if app_author:
                path /= app_author
            if app_name:
                path /= app_name
        elif get_system() == SystemEnum.linux:
            path = Path(os.getenv("XDG_CONFIG_HOME", Path.home() / ".config"))
            if app_author:
                path /= app_author
            if app_name:
                path /= app_name
        else:
            raise SystemError(f"Invalid system {get_system()}")
        if app_name and version:
            path /= version
        return path

    @staticmethod
    def _site_config_path(
        app_name: str = None,
        app_author: str = None,
        version: str = None,
        multi_path: bool = False,
    ) -> Path:
        r"""Return the full path to the user-shared config dir.

        "multi_path" (*nix only): return the entire os.pathsep-joined list of
        config dirs; by default only the first $XDG_CONFIG_DIRS entry is used
        (or '/etc/xdg/<AppName>' if XDG_CONFIG_DIRS is not set).

        Typical site config directories:
            Unix:  $XDG_CONFIG_DIRS[i]/<AppName> or /etc/xdg/<AppName>
            Win *: same as site_data_dir
        WARNING: do not use this on Windows Vista (see _site_data_path)."""
        if get_system() == SystemEnum.windows:
            if app_author is None:
                app_author = app_name
            path = AppPath._site_data_path(app_name, app_author)
        elif get_system() == SystemEnum.mac:
            path = Path.home() / "Library" / "Preferences"
            if app_author:
                path /= app_author
            if app_name:
                path /= app_name
        elif get_system() == SystemEnum.linux:
            # XDG default for $XDG_CONFIG_DIRS; only the first entry unless multi_path.
            raw = os.getenv("XDG_CONFIG_DIRS", "/etc/xdg")
            path_list = [os.path.expanduser(x.rstrip(os.sep)) for x in raw.split(os.pathsep)]
            if app_name:
                if version:
                    app_name = Path(app_name) / version
                path_list = [Path(x) / app_name for x in path_list]
            if multi_path:
                # NOTE: returns an os.pathsep-joined *str*, not a Path.
                return os.pathsep.join(str(a) for a in path_list)
            # BUG FIX: the previous implementation fell through after this
            # branch and appended app_author/app_name/version a *second*
            # time onto the XDG-derived path (and would have raised a
            # TypeError applying '/' to the multi_path string).  Return
            # early instead, mirroring _site_data_path.
            return Path(path_list[0])
        else:
            raise SystemError(f"Invalid system {get_system()}")
        if app_name and version:
            path /= version
        return path

    @staticmethod
    def _user_cache_path(
        app_name: str = None,
        app_author: str = None,
        version: str = None,
        opinionated: bool = True,
    ) -> Path:
        r"""Return the full path to the user-specific cache dir.

        "opinionated": on Windows, append "Cache" under the local appdata dir
        — MSDN only says local settings go in CSIDL_LOCAL_APPDATA (the same
        dir user_data uses), and apps conventionally cache *under* it, e.g.
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache.  Pass
        opinionated=False to disable the suffix.

        Typical user cache directories:
            Mac OS X: ~/Library/Caches/<AppName>
            Unix:     $XDG_CACHE_HOME or ~/.cache/<AppName>
            Win:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache"""
        preversion = []  # extra segments inserted between app_name and version
        if get_system() == SystemEnum.windows:
            if app_author is None:
                app_author = app_name
            path = Path(os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA")))
            if opinionated:
                preversion.append("Cache")
        elif get_system() == SystemEnum.mac:
            path = Path.home() / "Library" / "Caches"
        elif get_system() == SystemEnum.linux:
            path = Path(os.getenv("XDG_CACHE_HOME", Path.home() / ".cache"))
        else:
            raise SystemError(f"Invalid system {get_system()}")

        if app_author:
            path /= app_author
        if app_name:
            path /= app_name
        for segment in preversion:
            path /= segment
        if app_name and version:
            path /= version
        return path

    @staticmethod
    def _user_state_path(
        app_name: str = None,
        app_author: str = None,
        version: str = None,
        roaming: bool = False,
    ) -> Path:
        r"""Return the full path to the user-specific state dir.

        Parameters as in :meth:`_user_data_path`.

        Typical user state directories:
            Mac OS X / Win: same as user_data_dir
            Unix:           $XDG_STATE_HOME or ~/.local/state/<AppName>
        Follows the Debian proposal extending the XDG spec with
        $XDG_STATE_HOME:
        <https://wiki.debian.org/XDGBaseDirectorySpecification#state>."""
        if get_system() in [SystemEnum.windows, SystemEnum.mac]:
            if app_author is None:
                app_author = app_name
            path = AppPath._user_data_path(app_name, app_author, None, roaming)
        elif get_system() == SystemEnum.linux:
            path = Path(os.getenv("XDG_STATE_HOME", Path.home() / ".local" / "state"))
            if app_author:
                path /= app_author
            if app_name:
                path /= app_name
        else:
            raise SystemError(f"Invalid system {get_system()}")
        if app_name and version:
            path /= version
        return path

    @staticmethod
    def _user_log_path(
        app_name: str = None,
        app_author: str = None,
        version: str = None,
        opinionated: bool = True,
    ) -> Path:
        r"""Return the full path to the user-specific log dir.

        "opinionated": append "Logs" to the Windows appdata dir and "log" to
        the Unix cache dir; pass False to disable the suffix.

        Typical user log directories:
            Mac OS X: ~/Library/Logs/<AppName>
            Unix:     ~/.cache/<AppName>/log  (or under $XDG_CACHE_HOME)
            Win:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs"""
        preversion = []  # extra segments inserted before the version segment
        if get_system() == SystemEnum.mac:
            path = Path.home() / "Library" / "Logs"
            if app_name:
                # BUG FIX: previously unconditional, raising TypeError when
                # app_name was None.
                path /= app_name
        elif get_system() == SystemEnum.windows:
            if app_author is None:
                app_author = app_name
            path = AppPath._user_data_path(app_name, app_author, version)
            version = False  # version already embedded by _user_data_path
            if opinionated:
                preversion.append("Logs")
        elif get_system() == SystemEnum.linux:
            path = AppPath._user_cache_path(app_name, app_author, version)
            version = False  # version already embedded by _user_cache_path
            if opinionated:
                preversion.append("log")
        else:
            raise NotImplementedError(f"System {get_system()} not supported")

        for segment in preversion:
            path /= segment
        if app_name and version:
            path /= version
        return path

    def clean(self, confirm: bool = True) -> None:
        """Remove ALL directories owned by this AppPath instance.

        :param confirm: when False this is a no-op.  There is no interactive
            prompt — the caller is expected to have confirmed already."""
        if confirm:
            self.clean_user_log()
            self.clean_user_config()
            self.clean_user_data()
            self.clean_user_cache()
            self.clean_site_config()
            self.clean_site_data()

    @staticmethod
    def _remove_tree(path: Path) -> None:
        """Recursively delete *path* if it exists.

        NOTE: the public path properties re-create their directory on access
        when ensure_existence_on_access is enabled, so an (empty) directory
        may reappear immediately after cleaning."""
        if path.exists():
            shutil.rmtree(path)

    def clean_user_log(self) -> None:
        """Delete the user log directory tree."""
        self._remove_tree(self.user_log)

    def clean_user_config(self) -> None:
        """Delete the user config directory tree."""
        self._remove_tree(self.user_config)

    def clean_user_data(self) -> None:
        """Delete the user data directory tree."""
        self._remove_tree(self.user_data)

    def clean_user_cache(self) -> None:
        """Delete the user cache directory tree."""
        self._remove_tree(self.user_cache)

    def clean_site_config(self) -> None:
        """Delete the site config directory tree."""
        self._remove_tree(self.site_config)

    def clean_site_data(self) -> None:
        """Delete the site data directory tree."""
        self._remove_tree(self.site_data)

    @property
    def app_name(self):
        """The (possibly normalised) application name."""
        return self._app_name

    @property
    def app_author(self):
        """The (possibly normalised) application author, or None."""
        return self._app_author

    def app_version(self):
        """The (possibly normalised) application version, or None.

        NOTE(review): unlike app_name/app_author this is a plain method, not
        a @property; left as-is because adding the decorator would break
        callers invoking ``instance.app_version()``."""
        return self._app_version
if __name__ == "__main__":

    def main() -> None:
        """Demo: resolve and print every AppPath directory property for a
        sample application — with and without the optional version and
        author — then verify that using an AppPath with '/' raises."""
        demo_name = "MyApp"
        demo_author = __author__
        props = (
            "user_data",
            "user_config",
            "user_cache",
            "user_state",
            "user_log",
            "site_data",
            "site_config",
        )

        def report(header: str, app_dirs: AppPath) -> None:
            # Print one "<prop>: <resolved path>" line per directory category.
            print(header)
            for prop in props:
                print(f"{prop}: {getattr(app_dirs, prop)}")

        report(
            "-- app dirs (with optional 'version')",
            AppPath(demo_name, demo_author, app_version="1.0", ensure_existence_on_access=False),
        )
        report(
            "\n-- app dirs (without optional 'version')",
            AppPath(demo_name, demo_author, ensure_existence_on_access=False),
        )
        report(
            "\n-- app dirs (without optional '_app_author')",
            AppPath(demo_name, ensure_existence_on_access=False),
        )
        # NOTE(review): identical to the previous case — AppPath has no way
        # to explicitly "disable" the author component (unlike appdirs'
        # appauthor=False), so this section just repeats it.
        dirs = AppPath(demo_name, ensure_existence_on_access=False)
        report("\n-- app dirs (with disabled '_app_author')", dirs)

        ex = None
        try:
            dirs / "a"
        except Exception as e:
            ex = e
        assert ex is not None, "expected exception"

    main()
|
Apppath
|
/Apppath-1.0.3.tar.gz/Apppath-1.0.3/apppath/app_path.py
|
app_path.py
|
# Project Timeline
[](https://opensource.org/licenses/MIT)
## Overview
This project is a Python script that imports data from an Excel sheet and creates a graphical representation of the data using the matplotlib and PySimpleGUI libraries.
## Installation
Prerequisites:
-Python 3.x
-Required libraries: openpyxl, PySimpleGUI, matplotlib, pandas, mplcursors
-To install the required libraries, run the following command:
`pip install openpyxl PySimpleGUI matplotlib pandas mplcursors`
-To install this package and its dependencies, run the following command:
`pip install .`
## Usage
To run the script, open a terminal or command prompt and navigate to the directory where the script is saved. Then, run the following command:
`python project_timeline.py`
When the script runs, a PySimpleGUI window will appear with various input fields. Use the fields to specify which data to display and how it should be displayed.
Once you have entered your desired inputs, click the "Apply" button to generate the plot.
Or
1. Create and activate a virtual environment: `python3 -m venv env` and `source env/bin/activate`
2. Install dependencies: `pip install -r requirements.txt`
3. Run the script: `python project_timeline.py`
## Contributing
Contributions are welcome! Please open an issue or submit a pull request if you have any suggestions or bug fixes.
## License
This project is licensed under the terms of the MIT license. See the [LICENSE](LICENSE) file for details.
|
Apps-package
|
/Apps_package-0.1.0.tar.gz/Apps_package-0.1.0/README.md
|
README.md
|
===========================
Apptimize Python Server SDK
===========================
Description
===========
The Apptimize Python Server SDK allows application developers to run A/B tests and manage feature flags across Python server instances. The SDK is used by Apptimize Cross-Platform customers in conjunction with the Apptimize Dashboard, which provides a web-based interface for managing experiments and feature flags.
In addition, the Apptimize Python Server SDK can be used with client-side SDKs from Apptimize for iOS, Android, tvOS, Android TV, and JavaScript in order to manage the entire end-user digital experience and understand how users interact across different channels.
For more information or to start a free trial of Apptimize’s Cross-Platform experimentation product, please visit https://apptimize.com
Documentation
=============
Documentation for Apptimize's product and the Python Server SDK can be found at https://apptimize.com/docs/
|
Apptimize
|
/Apptimize-1.2.39.tar.gz/Apptimize-1.2.39/README.rst
|
README.rst
|
import sys
import math as python_lib_Math
import math as Math
from os import path as python_lib_os_Path
import inspect as python_lib_Inspect
import atexit as apptimize_native_python_AtExit
from threading import Event as apptimize_native_python_Event
from requests import Session as apptimize_native_python_Session
from requests_futures.sessions import FuturesSession as apptimize_native_python_FuturesSession
import requests as apptimize_native_python_Requests
import sys as python_lib_Sys
import builtins as python_lib_Builtins
import functools as python_lib_Functools
import json as python_lib_Json
import os as python_lib_Os
import random as python_lib_Random
import re as python_lib_Re
import ssl as python_lib_Ssl
import time as python_lib_Time
import traceback as python_lib_Traceback
from datetime import datetime as python_lib_datetime_Datetime
from datetime import timezone as python_lib_datetime_Timezone
from io import StringIO as python_lib_io_StringIO
from socket import socket as python_lib_socket_Socket
from ssl import SSLContext as python_lib_ssl_SSLContext
from threading import RLock as python_lib_threading_RLock
from threading import Thread as python_lib_threading_Thread
import urllib.parse as python_lib_urllib_Parse
from threading import Semaphore as Lock
from threading import RLock as sys_thread__Mutex_NativeRLock
import threading
class _hx_AnonObject:
    # Haxe->Python runtime support: emulates a Haxe anonymous object
    # (structural record).  Fields live directly in __dict__, so attribute
    # access, ``in``, and item access all operate on the same mapping.
    #
    # Class-level default; temporarily toggled by _hx_hasattr so that
    # access to a missing field raises instead of returning None.
    _hx_disable_getattr = False

    def __init__(self, fields):
        # Adopt the supplied dict wholesale as this object's attribute
        # storage (no copy), so later mutation of `fields` is visible here.
        self.__dict__ = fields

    def __repr__(self):
        return repr(self.__dict__)

    def __contains__(self, item):
        return item in self.__dict__

    def __getitem__(self, item):
        return self.__dict__[item]

    def __getattr__(self, name):
        # Invoked only for *missing* attributes.  Haxe anonymous objects
        # yield null for absent fields, hence None — unless _hx_hasattr has
        # switched on strict mode to probe for genuine existence.
        if (self._hx_disable_getattr):
            raise AttributeError('field does not exist')
        else:
            return None

    def _hx_hasattr(self,field):
        # Probe whether `field` genuinely exists: flip __getattr__ into
        # raising mode, attempt the access, then restore the flag.
        # NOTE(review): the flag is assigned via self, so it lands in the
        # instance __dict__ (the adopted `fields` mapping) rather than on
        # the class — confirm that side effect is intended.
        self._hx_disable_getattr = True
        try:
            getattr(self, field)
            self._hx_disable_getattr = False
            return True
        except AttributeError:
            self._hx_disable_getattr = False
            return False
# Global registry mapping Haxe class names to their generated Python classes.
_hx_classes = {}
class Enum:
    """Base class for Haxe-generated enum values.

    An enum value is a constructor ``tag``, its ordinal ``index``, and an
    optional list of constructor ``params`` (None for parameterless
    constructors)."""
    _hx_class_name = "Enum"
    _hx_is_interface = "False"
    __slots__ = ("tag", "index", "params")
    _hx_fields = ["tag", "index", "params"]
    _hx_methods = ["__str__"]

    def __init__(self, tag, index, params):
        self.tag = tag
        self.index = index
        self.params = params

    def __str__(self):
        # Parameterless constructors render as just the tag; otherwise the
        # Haxe-style "Tag(p1, p2, ...)" form.
        if self.params is None:
            return self.tag
        rendered = ", ".join(str(value) for value in self.params)
        return f"{self.tag}({rendered})"

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Reset all slots; used by the runtime when allocating without
        # running a constructor.
        _hx_o.tag = None
        _hx_o.index = None
        _hx_o.params = None
# Register the Enum base class in the Haxe runtime class registry
# (generated-code bookkeeping).
Enum._hx_class = Enum
_hx_classes["Enum"] = Enum
class apptimize_Apptimize:
    """Static facade of the Apptimize SDK public API (Haxe-generated Python).

    All methods are static; state lives in the sibling singletons
    (ABTDataStore, property stores, event manager). Do not edit by hand
    without checking the Haxe source (src/apptimize/Apptimize.hx).
    """
    _hx_class_name = "apptimize.Apptimize"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["_isInitialized", "_getApptimizeAnonUserId", "setAppVersion", "setAppName", "setOnParticipationCallback", "setOnMetadataUpdatedCallback", "setOnApptimizeInitializedCallback", "setOnParticipatedInExperimentCallback", "setup", "shutdown", "updateApptimizeMetadataOnce", "flushTracking", "getApptimizeSDKVersion", "getApptimizeSDKPlatform", "_initialize", "_getAlterations", "_getCodeBlockMethod", "runCodeBlock", "isFeatureFlagEnabled", "getString", "getBool", "getInt", "getDouble", "getStringArray", "getBoolArray", "getIntArray", "getDoubleArray", "getStringDictionary", "getBoolDictionary", "getIntDictionary", "getDoubleDictionary", "_getValue", "getVariantInfo", "_getVariantInfoForAlteration", "_getVariantInfoForDynamicVariable", "_getVariantInfoForExperiment", "track", "trackValue", "getMetadataSequenceNumber"]
    @staticmethod
    def _isInitialized():
        """Return True when the internal SDK state reports initialized."""
        return apptimize_ApptimizeInternal._isInitialized()
    @staticmethod
    def _getApptimizeAnonUserId():
        """Load the persisted anonymous user GUID; generate and save a new one when missing, empty, or invalid."""
        anonUserId = apptimize_support_persistence_ABTPersistence.loadString(apptimize_support_persistence_ABTPersistence.kAnonymousGuidKey)
        if (((anonUserId is None) or ((anonUserId == ""))) or (not apptimize_api_ABTUserGuid.isValidGuid(anonUserId))):
            anonUserId = apptimize_api_ABTUserGuid.generateUserGuid()
            apptimize_support_persistence_ABTPersistence.saveString(apptimize_support_persistence_ABTPersistence.kAnonymousGuidKey,anonUserId)
        return anonUserId
    @staticmethod
    def setAppVersion(version):
        """Store *version* under the "app_version" application property, then log the value actually stored."""
        apptimize_support_properties_ABTApplicationProperties.sharedInstance().setProperty("app_version",version)
        app_version = apptimize_support_properties_ABTApplicationProperties.sharedInstance().valueForProperty("app_version")
        apptimize_ABTLogger.v(("App Version set to: " + ("null" if app_version is None else app_version)),_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 264, 'className': "apptimize.Apptimize", 'methodName': "setAppVersion"}))
    @staticmethod
    def setAppName(name):
        """Store *name* under the "app_name" application property, then log the value actually stored."""
        apptimize_support_properties_ABTApplicationProperties.sharedInstance().setProperty("app_name",name)
        app_name = apptimize_support_properties_ABTApplicationProperties.sharedInstance().valueForProperty("app_name")
        apptimize_ABTLogger.v(("App Name set to: " + ("null" if app_name is None else app_name)),_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 275, 'className': "apptimize.Apptimize", 'methodName': "setAppName"}))
    @staticmethod
    def setOnParticipationCallback(callback):
        """Deprecated: register a participation callback; logs a warning pointing at setOnParticipatedInExperimentCallback."""
        apptimize_events_ABTEventManager.setOnParticipationCallback(callback)
        apptimize_ABTLogger.w("setOnParticipationCallback is deprecated - please use setOnParticipatedInExperimentCallback.",_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 296, 'className': "apptimize.Apptimize", 'methodName': "setOnParticipationCallback"}))
    @staticmethod
    def setOnMetadataUpdatedCallback(callback):
        """Register the callback invoked after metadata updates."""
        apptimize_events_ABTEventManager.setOnMetadataUpdatedCallback(callback)
        apptimize_ABTLogger.v("OnMetadataProcessedCallback set!",_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 315, 'className': "apptimize.Apptimize", 'methodName': "setOnMetadataUpdatedCallback"}))
    @staticmethod
    def setOnApptimizeInitializedCallback(callback):
        """Register the callback invoked once Apptimize finishes initializing."""
        apptimize_events_ABTEventManager.setOnApptimizeInitializedCallback(callback)
        apptimize_ABTLogger.v("OnApptimizeInitializedCallback set!",_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 334, 'className': "apptimize.Apptimize", 'methodName': "setOnApptimizeInitializedCallback"}))
    @staticmethod
    def setOnParticipatedInExperimentCallback(callback):
        """Register the callback invoked when a user participates in an experiment."""
        apptimize_events_ABTEventManager.setOnParticipatedInExperimentCallback(callback)
        apptimize_ABTLogger.v("OnParticipatedInExperimentCallback set!",_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 352, 'className': "apptimize.Apptimize", 'methodName': "setOnParticipatedInExperimentCallback"}))
    @staticmethod
    def setup(appKey,configAttributes = None):
        """Validate *appKey* and start SDK setup.

        No-ops (with a log message) when the key is missing/empty or the SDK
        is already initialized with the same key. Otherwise delegates to
        ApptimizeInternal._setup with a completion hook that finishes
        initialization.
        """
        if ((appKey is None) or ((appKey == ""))):
            apptimize_ABTLogger.c("Unable to initialize Apptimize due to missing app key.",_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 404, 'className': "apptimize.Apptimize", 'methodName': "setup"}))
            return
        elif ((apptimize_ABTDataStore.getAppKey() is not None) and ((apptimize_ABTDataStore.getAppKey() == appKey))):
            apptimize_ABTLogger.w((("Apptimize is already initialized with app key: \"" + ("null" if appKey is None else appKey)) + "\"."),_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 407, 'className': "apptimize.Apptimize", 'methodName': "setup"}))
            return
        # Completion hook run by _setup: wipe stored data when the app key
        # changed, log identifiers, then run _initialize for the new key.
        def _hx_local_0():
            if ((apptimize_ABTDataStore.getAppKey() is not None) and ((apptimize_ABTDataStore.getAppKey() != appKey))):
                apptimize_ABTDataStore.clear()
            apptimize_ABTLogger.v(("Set Anonymous User ID: " + HxOverrides.stringOrNull(apptimize_Apptimize._getApptimizeAnonUserId())),_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 418, 'className': "apptimize.Apptimize", 'methodName': "setup"}))
            apptimize_ABTLogger.i(((("Apptimize " + HxOverrides.stringOrNull(apptimize_Apptimize.getApptimizeSDKPlatform())) + " SDK initialized.\nApptimize SDK Version: ") + HxOverrides.stringOrNull(apptimize_Apptimize.getApptimizeSDKVersion())),_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 419, 'className': "apptimize.Apptimize", 'methodName': "setup"}))
            apptimize_Apptimize._initialize(appKey)
        apptimize_ApptimizeInternal._setup(appKey,configAttributes,_hx_local_0)
    @staticmethod
    def shutdown():
        """Shut the SDK down (delegates to ApptimizeInternal)."""
        apptimize_ApptimizeInternal.shutdown()
    @staticmethod
    def updateApptimizeMetadataOnce():
        """Force one metadata refresh; failures are caught and logged, never raised."""
        try:
            apptimize_ABTDataStore.checkForUpdatedMetaData(True)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_ABTLogger.e(("Failed to update Metadata: " + Std.string(e)),_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 451, 'className': "apptimize.Apptimize", 'methodName': "updateApptimizeMetadataOnce"}))
    @staticmethod
    def flushTracking():
        """Flush pending tracking data; warns (no-op) before setup() has run."""
        if apptimize_Apptimize._isInitialized():
            apptimize_ABTDataStore.sharedInstance().flushTracking()
        else:
            apptimize_ABTLogger.w("Tracking can only be flushed after setup().",_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 466, 'className': "apptimize.Apptimize", 'methodName': "flushTracking"}))
    @staticmethod
    def getApptimizeSDKVersion():
        """Return the SDK version string."""
        return "1.2.39"
    @staticmethod
    def getApptimizeSDKPlatform():
        """Return the SDK platform name ("Python" on this compile target)."""
        sdkPlatform = "N/A"
        sdkPlatform = "Python"
        return sdkPlatform
    @staticmethod
    def _initialize(appKey):
        """Load metadata for *appKey*, advance internal state, and fire the initialized event once metadata is ready."""
        apptimize_ABTDataStore.sharedInstance().loadMetaData(appKey)
        apptimize_ApptimizeInternal.setState(2)
        if (apptimize_ABTDataStore.sharedInstance().hasMetadata(apptimize_ABTDataStore.getAppKey()) and apptimize_ApptimizeInternal._trySetReady()):
            apptimize_events_ABTEventManager.dispatchOnApptimizeInitialized()
        apptimize_ABTLogger.i((("Apptimize initialized with app key \"" + ("null" if appKey is None else appKey)) + "\"."),_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 521, 'className': "apptimize.Apptimize", 'methodName': "_initialize"}))
    @staticmethod
    def _getAlterations(userID,customAttributes):
        """Return current alterations for the user, or an empty list before initialization."""
        apptimize_ABTDataStore._checkForUpdatedMetadataIfNecessary()
        if apptimize_Apptimize._isInitialized():
            envParams = apptimize_filter_ABTFilterEnvParams(userID,apptimize_Apptimize._getApptimizeAnonUserId(),customAttributes,apptimize_ABTDataStore.getAppKey(),apptimize_support_properties_ABTApplicationProperties.sharedInstance(),apptimize_support_properties_ABTInternalProperties.sharedInstance())
            return apptimize_ApptimizeInternal._getAlterations(envParams)
        return list()
    @staticmethod
    def _getCodeBlockMethod(codeBlockVariableName,userID,customAttributes):
        """Resolve the callback method name for a code-block variable in the user's environment."""
        apptimize_ABTDataStore._checkForUpdatedMetadataIfNecessary()
        envParams = apptimize_filter_ABTFilterEnvParams(userID,apptimize_Apptimize._getApptimizeAnonUserId(),customAttributes,apptimize_ABTDataStore.getAppKey(),apptimize_support_properties_ABTApplicationProperties.sharedInstance(),apptimize_support_properties_ABTInternalProperties.sharedInstance())
        return apptimize_ApptimizeInternal._getCodeBlockMethod(envParams,codeBlockVariableName)
    @staticmethod
    def runCodeBlock(codeBlockVariableName,callback,userID,customAttributes = None):
        """Resolve the code-block variable for the user and invoke the matching method on *callback*.

        *callback* may be an object with methods (looked up via Reflect) or a
        dict/map of callables. Invalid or missing userID is reset to None and
        logged as a critical error.
        """
        if (userID is not None):
            if ((not apptimize_util_ABTTypes.isString(userID)) or ((StringTools.ltrim(userID) == ""))):
                apptimize_ABTLogger.w("The `userID` argument cannot be set to a non-string value, be empty or be whitespace only, setting to null instead.",_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 47, 'className': "apptimize.Apptimize", 'methodName': "runCodeBlock"}))
                userID = None
        if (userID is None):
            apptimize_ABTLogger.c((("The parameter " + "userID") + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 53, 'className': "apptimize.Apptimize", 'methodName': "runCodeBlock"}))
        # Dead branch: macro-expanded guard whose condition is always False.
        if ((customAttributes is None) and False):
            apptimize_ABTLogger.c((("The parameter " + HxOverrides.stringOrNull(None)) + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 59, 'className': "apptimize.Apptimize", 'methodName': "runCodeBlock"}))
        attrs = apptimize_util_ABTUtilDictionary.nativeObjectToStringMap(customAttributes)
        methodName = apptimize_Apptimize._getCodeBlockMethod(codeBlockVariableName,userID,attrs)
        callbackMap = apptimize_util_ABTUtilDictionary.nativeObjectToStringMap(callback)
        if ((methodName is None) or ((methodName == ""))):
            apptimize_ABTLogger.w((("No Code Block with variable name " + ("null" if codeBlockVariableName is None else codeBlockVariableName)) + " found, skipping callback."),_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 615, 'className': "apptimize.Apptimize", 'methodName': "runCodeBlock"}))
            return
        elif ((callback is None) or ((callbackMap.h.get(methodName,None) is None))):
            # Not present in the converted map: fall back to reflective
            # property lookup on the callback object.
            method = Reflect.getProperty(callback,methodName)
            if (method is not None):
                Reflect.callMethod(callback,method,[])
            else:
                apptimize_ABTLogger.w(("Supplied callbacks do not include method: " + ("null" if methodName is None else methodName)),_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 623, 'className': "apptimize.Apptimize", 'methodName': "runCodeBlock"}))
        else:
            apptimize_ABTLogger.v(("Calling callback method: " + ("null" if methodName is None else methodName)),_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 626, 'className': "apptimize.Apptimize", 'methodName': "runCodeBlock"}))
            func = callbackMap.h.get(methodName,None)
            if (not Reflect.isFunction(func)):
                apptimize_ABTLogger.e("runCodeBlock() called with callback that isn't a function/method.",_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 630, 'className': "apptimize.Apptimize", 'methodName': "runCodeBlock"}))
                return
            func()
    @staticmethod
    def isFeatureFlagEnabled(name,userID,customAttributes = None):
        """Return the feature flag's boolean value, defaulting to False."""
        return apptimize_Apptimize.getBool(name,False,userID,customAttributes)
    @staticmethod
    def getString(name,defaultValue,userID,customAttributes = None):
        """Return the string dynamic variable *name*, or *defaultValue* when unset."""
        stringValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.String,None,customAttributes)
        if (stringValue is None):
            return defaultValue
        return stringValue
    @staticmethod
    def getBool(name,defaultValue,userID,customAttributes = None):
        """Return the boolean dynamic variable *name*, or *defaultValue* when unset."""
        boolValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Boolean,None,customAttributes)
        if (boolValue is None):
            return defaultValue
        return boolValue
    @staticmethod
    def getInt(name,defaultValue,userID,customAttributes = None):
        """Return the integer dynamic variable *name*, or *defaultValue* when unset."""
        intValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Integer,None,customAttributes)
        if (intValue is None):
            return defaultValue
        return intValue
    @staticmethod
    def getDouble(name,defaultValue,userID,customAttributes = None):
        """Return the double dynamic variable *name*, or *defaultValue* when unset."""
        floatValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Double,None,customAttributes)
        if (floatValue is None):
            return defaultValue
        return floatValue
    @staticmethod
    def getStringArray(name,defaultValue,userID,customAttributes = None):
        """Return the string-array dynamic variable *name* as a native list, or *defaultValue* when unset."""
        stringArrayValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Array,apptimize_ABTApptimizeVariableType.String,customAttributes)
        if (stringArrayValue is None):
            return defaultValue
        return apptimize_util_ABTUtilArray.toNativeArray(stringArrayValue,apptimize_util_ArrayType.String)
    @staticmethod
    def getBoolArray(name,defaultValue,userID,customAttributes = None):
        """Return the bool-array dynamic variable *name* as a native list, or *defaultValue* when unset."""
        boolArrayValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Array,apptimize_ABTApptimizeVariableType.Boolean,customAttributes)
        if (boolArrayValue is None):
            return defaultValue
        return apptimize_util_ABTUtilArray.toNativeArray(boolArrayValue,apptimize_util_ArrayType.Bool)
    @staticmethod
    def getIntArray(name,defaultValue,userID,customAttributes = None):
        """Return the int-array dynamic variable *name* as a native list, or *defaultValue* when unset."""
        intArrayValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Array,apptimize_ABTApptimizeVariableType.Integer,customAttributes)
        if (intArrayValue is None):
            return defaultValue
        return apptimize_util_ABTUtilArray.toNativeArray(intArrayValue,apptimize_util_ArrayType.Int)
    @staticmethod
    def getDoubleArray(name,defaultValue,userID,customAttributes = None):
        """Return the double-array dynamic variable *name* as a native list, or *defaultValue* when unset."""
        doubleArrayValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Array,apptimize_ABTApptimizeVariableType.Double,customAttributes)
        if (doubleArrayValue is None):
            return defaultValue
        return apptimize_util_ABTUtilArray.toNativeArray(doubleArrayValue,apptimize_util_ArrayType.Double)
    @staticmethod
    def getStringDictionary(name,defaultValue,userID,customAttributes = None):
        """Return the string-dictionary dynamic variable *name*, or *defaultValue* when unset."""
        stringDictionaryValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Dictionary,apptimize_ABTApptimizeVariableType.String,customAttributes)
        if (stringDictionaryValue is None):
            return defaultValue
        return stringDictionaryValue
    @staticmethod
    def getBoolDictionary(name,defaultValue,userID,customAttributes = None):
        """Return the bool-dictionary dynamic variable *name*, or *defaultValue* when unset."""
        boolDictionaryValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Dictionary,apptimize_ABTApptimizeVariableType.Boolean,customAttributes)
        if (boolDictionaryValue is None):
            return defaultValue
        return boolDictionaryValue
    @staticmethod
    def getIntDictionary(name,defaultValue,userID,customAttributes = None):
        """Return the int-dictionary dynamic variable *name*, or *defaultValue* when unset."""
        intDictionaryValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Dictionary,apptimize_ABTApptimizeVariableType.Integer,customAttributes)
        if (intDictionaryValue is None):
            return defaultValue
        return intDictionaryValue
    @staticmethod
    def getDoubleDictionary(name,defaultValue,userID,customAttributes = None):
        """Return the double-dictionary dynamic variable *name*, or *defaultValue* when unset."""
        doubleDictionaryValue = apptimize_Apptimize._getValue(name,userID,apptimize_ABTApptimizeVariableType.Dictionary,apptimize_ABTApptimizeVariableType.Double,customAttributes)
        if (doubleDictionaryValue is None):
            return defaultValue
        return doubleDictionaryValue
    @staticmethod
    def _getValue(name,userID,_hx_type,nestedType,customAttributes):
        """Core dynamic-variable lookup shared by all typed getters.

        Returns None (caller substitutes its default) before initialization.
        Invalid userID is reset to None and logged as a critical error.
        """
        if (not apptimize_Apptimize._isInitialized()):
            return None
        if (userID is not None):
            if ((not apptimize_util_ABTTypes.isString(userID)) or ((StringTools.ltrim(userID) == ""))):
                apptimize_ABTLogger.w("The `userID` argument cannot be set to a non-string value, be empty or be whitespace only, setting to null instead.",_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 47, 'className': "apptimize.Apptimize", 'methodName': "_getValue"}))
                userID = None
        if (userID is None):
            apptimize_ABTLogger.c((("The parameter " + "userID") + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 53, 'className': "apptimize.Apptimize", 'methodName': "_getValue"}))
        # Dead branch: macro-expanded guard whose condition is always False.
        if ((customAttributes is None) and False):
            apptimize_ABTLogger.c((("The parameter " + HxOverrides.stringOrNull(None)) + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 59, 'className': "apptimize.Apptimize", 'methodName': "_getValue"}))
        attrMap = apptimize_util_ABTUtilDictionary.nativeObjectToStringMap(customAttributes)
        envParams = apptimize_filter_ABTFilterEnvParams(userID,apptimize_Apptimize._getApptimizeAnonUserId(),attrMap,apptimize_ABTDataStore.getAppKey(),apptimize_support_properties_ABTApplicationProperties.sharedInstance(),apptimize_support_properties_ABTInternalProperties.sharedInstance())
        return apptimize_ABTApptimizeVariable.getValue(envParams,name,_hx_type,nestedType)
    @staticmethod
    def getVariantInfo(userID,customAttributes = None):
        """Return a native array of VariantInfo for every variant the user matches."""
        if (userID is not None):
            if ((not apptimize_util_ABTTypes.isString(userID)) or ((StringTools.ltrim(userID) == ""))):
                apptimize_ABTLogger.w("The `userID` argument cannot be set to a non-string value, be empty or be whitespace only, setting to null instead.",_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 47, 'className': "apptimize.Apptimize", 'methodName': "getVariantInfo"}))
                userID = None
        if (userID is None):
            apptimize_ABTLogger.c((("The parameter " + "userID") + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 53, 'className': "apptimize.Apptimize", 'methodName': "getVariantInfo"}))
        # Dead branch: macro-expanded guard whose condition is always False.
        if ((customAttributes is None) and False):
            apptimize_ABTLogger.c((("The parameter " + HxOverrides.stringOrNull(None)) + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 59, 'className': "apptimize.Apptimize", 'methodName': "getVariantInfo"}))
        variantInfos = list()
        anonID = apptimize_Apptimize._getApptimizeAnonUserId()
        attrMap = apptimize_util_ABTUtilDictionary.nativeObjectToStringMap(customAttributes)
        envParams = apptimize_filter_ABTFilterEnvParams(userID,anonID,attrMap,apptimize_ABTDataStore.getAppKey(),apptimize_support_properties_ABTApplicationProperties.sharedInstance(),apptimize_support_properties_ABTInternalProperties.sharedInstance())
        # Haxe-generated index loop over the variants array.
        _g = 0
        _g1 = apptimize_ApptimizeInternal._getVariants(envParams)
        while (_g < len(_g1)):
            variant = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            x = apptimize_VariantInfo.initWithVariant(variant,userID,anonID)
            variantInfos.append(x)
        return apptimize_util_ABTUtilArray.toNativeArray(variantInfos,apptimize_util_ArrayType.VariantInfo)
    @staticmethod
    def _getVariantInfoForAlteration(name,userID,customAttributes):
        """Return VariantInfo for the first alteration keyed *name*, or None."""
        anonID = apptimize_Apptimize._getApptimizeAnonUserId()
        _g = 0
        _g1 = apptimize_Apptimize._getAlterations(userID,customAttributes)
        while (_g < len(_g1)):
            alteration = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            if (alteration.getKey() == name):
                return apptimize_VariantInfo.initWithVariant(alteration.getVariant(),userID,anonID)
        return None
    @staticmethod
    def _getVariantInfoForDynamicVariable(name,userID,customAttributes = None):
        """Return VariantInfo for the alteration backing dynamic variable *name*, or None."""
        if (userID is not None):
            if ((not apptimize_util_ABTTypes.isString(userID)) or ((StringTools.ltrim(userID) == ""))):
                apptimize_ABTLogger.w("The `userID` argument cannot be set to a non-string value, be empty or be whitespace only, setting to null instead.",_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 47, 'className': "apptimize.Apptimize", 'methodName': "_getVariantInfoForDynamicVariable"}))
                userID = None
        if (userID is None):
            apptimize_ABTLogger.c((("The parameter " + "userID") + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 53, 'className': "apptimize.Apptimize", 'methodName': "_getVariantInfoForDynamicVariable"}))
        # Dead branch: macro-expanded guard whose condition is always False.
        if ((customAttributes is None) and False):
            apptimize_ABTLogger.c((("The parameter " + HxOverrides.stringOrNull(None)) + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 59, 'className': "apptimize.Apptimize", 'methodName': "_getVariantInfoForDynamicVariable"}))
        attrMap = apptimize_util_ABTUtilDictionary.nativeObjectToStringMap(customAttributes)
        return apptimize_Apptimize._getVariantInfoForAlteration(name,userID,attrMap)
    @staticmethod
    def _getVariantInfoForExperiment(name,userID,customAttributes = None):
        """Return VariantInfo for the variant of experiment *name* the user matches, or None."""
        if (userID is not None):
            if ((not apptimize_util_ABTTypes.isString(userID)) or ((StringTools.ltrim(userID) == ""))):
                apptimize_ABTLogger.w("The `userID` argument cannot be set to a non-string value, be empty or be whitespace only, setting to null instead.",_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 47, 'className': "apptimize.Apptimize", 'methodName': "_getVariantInfoForExperiment"}))
                userID = None
        if (userID is None):
            apptimize_ABTLogger.c((("The parameter " + "userID") + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 53, 'className': "apptimize.Apptimize", 'methodName': "_getVariantInfoForExperiment"}))
        # Dead branch: macro-expanded guard whose condition is always False.
        if ((customAttributes is None) and False):
            apptimize_ABTLogger.c((("The parameter " + HxOverrides.stringOrNull(None)) + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 59, 'className': "apptimize.Apptimize", 'methodName': "_getVariantInfoForExperiment"}))
        anonID = apptimize_Apptimize._getApptimizeAnonUserId()
        attrMap = apptimize_util_ABTUtilDictionary.nativeObjectToStringMap(customAttributes)
        envParams = apptimize_filter_ABTFilterEnvParams(userID,anonID,attrMap,apptimize_ABTDataStore.getAppKey(),apptimize_support_properties_ABTApplicationProperties.sharedInstance(),apptimize_support_properties_ABTInternalProperties.sharedInstance())
        _g = 0
        _g1 = apptimize_ApptimizeInternal._getVariants(envParams)
        while (_g < len(_g1)):
            variant = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            if (variant.getExperimentName() == name):
                return apptimize_VariantInfo.initWithVariant(variant,userID,anonID)
        return None
    @staticmethod
    def track(eventName,userID,customAttributes = None):
        """Track event *eventName* for the user; warns (no-op) before setup()."""
        if apptimize_Apptimize._isInitialized():
            if (userID is not None):
                if ((not apptimize_util_ABTTypes.isString(userID)) or ((StringTools.ltrim(userID) == ""))):
                    apptimize_ABTLogger.w("The `userID` argument cannot be set to a non-string value, be empty or be whitespace only, setting to null instead.",_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 47, 'className': "apptimize.Apptimize", 'methodName': "track"}))
                    userID = None
            if (userID is None):
                apptimize_ABTLogger.c((("The parameter " + "userID") + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 53, 'className': "apptimize.Apptimize", 'methodName': "track"}))
            # Dead branch: macro-expanded guard whose condition is always False.
            if ((customAttributes is None) and False):
                apptimize_ABTLogger.c((("The parameter " + HxOverrides.stringOrNull(None)) + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 59, 'className': "apptimize.Apptimize", 'methodName': "track"}))
            attrMap = apptimize_util_ABTUtilDictionary.nativeObjectToStringMap(customAttributes)
            envParams = apptimize_filter_ABTFilterEnvParams(userID,apptimize_Apptimize._getApptimizeAnonUserId(),attrMap,apptimize_ABTDataStore.getAppKey(),apptimize_support_properties_ABTApplicationProperties.sharedInstance(),apptimize_support_properties_ABTInternalProperties.sharedInstance())
            apptimize_ApptimizeInternal.generateTrackEvent(envParams,eventName,None)
        else:
            apptimize_ABTLogger.w("Events can only be tracked after setup() has been called.",_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 1215, 'className': "apptimize.Apptimize", 'methodName': "track"}))
    @staticmethod
    def trackValue(eventName,value,userID,customAttributes = None):
        """Track event *eventName* with numeric *value*; non-numeric values are rejected with a warning."""
        if apptimize_Apptimize._isInitialized():
            if (userID is not None):
                if ((not apptimize_util_ABTTypes.isString(userID)) or ((StringTools.ltrim(userID) == ""))):
                    apptimize_ABTLogger.w("The `userID` argument cannot be set to a non-string value, be empty or be whitespace only, setting to null instead.",_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 47, 'className': "apptimize.Apptimize", 'methodName': "trackValue"}))
                    userID = None
            if (userID is None):
                apptimize_ABTLogger.c((("The parameter " + "userID") + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 53, 'className': "apptimize.Apptimize", 'methodName': "trackValue"}))
            # Dead branch: macro-expanded guard whose condition is always False.
            if ((customAttributes is None) and False):
                apptimize_ABTLogger.c((("The parameter " + HxOverrides.stringOrNull(None)) + " is required"),_hx_AnonObject({'fileName': "src/apptimize/macros/ABTClientMacro.hx", 'lineNumber': 59, 'className': "apptimize.Apptimize", 'methodName': "trackValue"}))
            # NOTE(review): the second check compares against
            # apptimize_util_ArrayType.Int (an enum member), not a numeric
            # type — looks suspicious; confirm against the Haxe source.
            if ((not Std.isOfType(value,Float)) and (not Std.isOfType(value,apptimize_util_ArrayType.Int))):
                apptimize_ABTLogger.w("trackValue() called with a non-float value. Event not logged.",_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 1246, 'className': "apptimize.Apptimize", 'methodName': "trackValue"}))
                return
            attrMap = apptimize_util_ABTUtilDictionary.nativeObjectToStringMap(customAttributes)
            envParams = apptimize_filter_ABTFilterEnvParams(userID,apptimize_Apptimize._getApptimizeAnonUserId(),attrMap,apptimize_ABTDataStore.getAppKey(),apptimize_support_properties_ABTApplicationProperties.sharedInstance(),apptimize_support_properties_ABTInternalProperties.sharedInstance())
            apptimize_ApptimizeInternal.generateTrackEvent(envParams,eventName,value)
        else:
            apptimize_ABTLogger.w("Events can only be tracked after setup() has been called.",_hx_AnonObject({'fileName': "src/apptimize/Apptimize.hx", 'lineNumber': 1254, 'className': "apptimize.Apptimize", 'methodName': "trackValue"}))
    @staticmethod
    def getMetadataSequenceNumber():
        """Return the current metadata sequence number, or 0 when no metadata is loaded."""
        store = apptimize_ABTDataStore.sharedInstance()
        md = store.getMetaData(apptimize_ABTDataStore.getAppKey())
        if (md is not None):
            return md.getSequenceNumber()
        return 0
# Register the class in the Haxe runtime class table.
apptimize_Apptimize._hx_class = apptimize_Apptimize
_hx_classes["apptimize.Apptimize"] = apptimize_Apptimize
class Apptimize(apptimize_Apptimize):
    """Top-level alias exposing apptimize.Apptimize under its short name (Haxe @:expose)."""
    _hx_class_name = "Apptimize"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = []
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_Apptimize
# Register the alias in the Haxe runtime class table.
Apptimize._hx_class = Apptimize
_hx_classes["Apptimize"] = Apptimize
class apptimize_VariantInfo:
    """Snapshot of one variant and the user's relationship to it.

    Field values are supplied by ``initWithVariant``; all accessors are
    simple reads of the stored state.
    """
    _hx_class_name = "apptimize.VariantInfo"
    _hx_is_interface = "False"
    __slots__ = ("_variantId", "_variantName", "_experimentId", "_experimentName", "_experimentType", "_experimentTypeName", "_currentPhase", "_participationPhase", "_cycle", "_userId", "_anonymousUserId", "_userHasParticipated")
    _hx_fields = ["_variantId", "_variantName", "_experimentId", "_experimentName", "_experimentType", "_experimentTypeName", "_currentPhase", "_participationPhase", "_cycle", "_userId", "_anonymousUserId", "_userHasParticipated"]
    _hx_methods = ["getVariantId", "getVariantName", "getExperimentId", "getExperimentName", "getExperimentType", "getExperimentTypeName", "getCurrentPhase", "getParticipationPhase", "getCycle", "getUserId", "getAnonymousUserId"]
    _hx_statics = ["initWithVariant", "apptimizeExperimentTypeForString"]

    def __init__(self, variantId, variantName, experimentId, experimentName, experimentType, cycle, currentPhase, participationPhase, userId, anonymousUserId, userHasParticipated):
        self._variantId = variantId
        self._variantName = variantName
        self._experimentId = experimentId
        self._experimentName = experimentName
        # Keep both the raw type string and its enum mapping.
        self._experimentTypeName = experimentType
        self._experimentType = apptimize_VariantInfo.apptimizeExperimentTypeForString(experimentType)
        self._cycle = cycle
        self._currentPhase = currentPhase
        self._participationPhase = participationPhase
        self._userId = userId
        self._anonymousUserId = anonymousUserId
        self._userHasParticipated = userHasParticipated

    def getVariantId(self):
        """Return the variant's numeric id."""
        return self._variantId

    def getVariantName(self):
        """Return the variant's display name."""
        return self._variantName

    def getExperimentId(self):
        """Return the owning experiment's id."""
        return self._experimentId

    def getExperimentName(self):
        """Return the owning experiment's name."""
        return self._experimentName

    def getExperimentType(self):
        """Return the experiment type as an enum value."""
        return self._experimentType

    def getExperimentTypeName(self):
        """Return the experiment type's raw string form."""
        return self._experimentTypeName

    def getCurrentPhase(self):
        """Return the experiment's current phase."""
        return self._currentPhase

    def getParticipationPhase(self):
        """Return the phase in which the user participated."""
        return self._participationPhase

    def getCycle(self):
        """Return the experiment cycle number."""
        return self._cycle

    def getUserId(self):
        """Return the (customer-supplied) user id."""
        return self._userId

    def getAnonymousUserId(self):
        """Return the SDK-generated anonymous user id."""
        return self._anonymousUserId

    @staticmethod
    def initWithVariant(variant, userId, anonymousUserId):
        """Build a VariantInfo from a variant object plus the user identifiers."""
        participationPhase = 0
        userHasParticipated = False
        # NOTE(review): computed but never used — kept so the same variant
        # accessors are still invoked, preserving call behavior exactly.
        variantString = "v" + Std.string(variant.getVariantID()) + "_" + Std.string(variant.getCycle())
        return apptimize_VariantInfo(variant.getVariantID(), variant.getVariantName(), variant.getExperimentID(), variant.getExperimentName(), variant.getExperimentType(), variant.getCycle(), variant.getPhase(), participationPhase, userId, anonymousUserId, userHasParticipated)

    @staticmethod
    def apptimizeExperimentTypeForString(stringType):
        """Map a metadata experiment-type string onto the experiment-type enum (case-insensitive)."""
        kind = stringType.lower()
        if kind in ("string-value", "int-value", "double-value", "variables"):
            return apptimize_ApptimizeExperimentType.DynamicVariables
        if kind == "code-block":
            return apptimize_ApptimizeExperimentType.CodeBlock
        if kind == "feature-flag":
            return apptimize_ApptimizeExperimentType.FeatureFlag
        if kind == "feature-config":
            return apptimize_ApptimizeExperimentType.FeatureVariables
        if kind == "wysiwyg":
            return apptimize_ApptimizeExperimentType.Visual
        return apptimize_ApptimizeExperimentType.Unknown

    @staticmethod
    def _hx_empty_init(_hx_o):
        """Reset every stored field to None (Haxe empty-constructor hook)."""
        for _field in apptimize_VariantInfo.__slots__:
            setattr(_hx_o, _field, None)
# Register the class in the Haxe runtime class table.
apptimize_VariantInfo._hx_class = apptimize_VariantInfo
_hx_classes["apptimize.VariantInfo"] = apptimize_VariantInfo
class VariantInfo(apptimize_VariantInfo):
    """Top-level alias exposing apptimize.VariantInfo under its short name (Haxe @:expose)."""
    _hx_class_name = "VariantInfo"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = []
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_VariantInfo
    def __init__(self,variantId,variantName,experimentId,experimentName,experimentType,cycle,currentPhase,participationPhase,userId,anonymousUserId,userHasParticipated):
        # Pure pass-through to the base constructor; no additional state.
        super().__init__(variantId,variantName,experimentId,experimentName,experimentType,cycle,currentPhase,participationPhase,userId,anonymousUserId,userHasParticipated)
# Register the alias in the Haxe runtime class table.
VariantInfo._hx_class = VariantInfo
_hx_classes["VariantInfo"] = VariantInfo
class Class: pass
class Date:
    """Haxe Date shim: wraps a local-time datetime plus its UTC counterpart.

    Months are 0-based at this API's boundary (Haxe convention) and shifted
    to Python's 1-based months internally.
    """
    _hx_class_name = "Date"
    _hx_is_interface = "False"
    __slots__ = ("date", "dateUTC")
    _hx_fields = ["date", "dateUTC"]
    _hx_methods = ["toString"]
    _hx_statics = ["now", "fromTime", "makeLocal", "fromString"]

    def __init__(self, year, month, day, hour, _hx_min, sec):
        self.dateUTC = None
        # Clamp values datetime would reject outright.
        if year < python_lib_datetime_Datetime.min.year:
            year = python_lib_datetime_Datetime.min.year
        if day == 0:
            day = 1
        naive = python_lib_datetime_Datetime(year, (month + 1), day, hour, _hx_min, sec, 0)
        self.date = Date.makeLocal(naive)
        self.dateUTC = self.date.astimezone(python_lib_datetime_Timezone.utc)

    def toString(self):
        """Render as "YYYY-MM-DD HH:MM:SS" in local time."""
        return self.date.strftime("%Y-%m-%d %H:%M:%S")

    @staticmethod
    def now():
        """Return a Date holding the current local time."""
        stamp = Date(2000, 0, 1, 0, 0, 0)
        stamp.date = Date.makeLocal(python_lib_datetime_Datetime.now())
        stamp.dateUTC = stamp.date.astimezone(python_lib_datetime_Timezone.utc)
        return stamp

    @staticmethod
    def fromTime(t):
        """Build a Date from a Haxe timestamp (milliseconds since the epoch)."""
        stamp = Date(2000, 0, 1, 0, 0, 0)
        stamp.date = Date.makeLocal(python_lib_datetime_Datetime.fromtimestamp((t / 1000.0)))
        stamp.dateUTC = stamp.date.astimezone(python_lib_datetime_Timezone.utc)
        return stamp

    @staticmethod
    def makeLocal(date):
        """Attach the local timezone to *date*, with a fallback when astimezone() fails."""
        try:
            return date.astimezone()
        except BaseException as _g:
            # Fallback: resolve the local tz explicitly and attach it via replace().
            tzinfo = python_lib_datetime_Datetime.now(python_lib_datetime_Timezone.utc).astimezone().tzinfo
            return date.replace(**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'tzinfo': tzinfo})))

    @staticmethod
    def fromString(s):
        """Parse "HH:MM:SS", "YYYY-MM-DD", or "YYYY-MM-DD HH:MM:SS"; raise on anything else."""
        size = len(s)
        if size == 8:
            bits = s.split(":")
            hours = Std.parseInt(bits[0] if 0 < len(bits) else None)
            minutes = Std.parseInt(bits[1] if 1 < len(bits) else None)
            seconds = Std.parseInt(bits[2] if 2 < len(bits) else None)
            return Date.fromTime(hours * 3600000. + minutes * 60000. + seconds * 1000.)
        if size == 10:
            bits = s.split("-")
            return Date(Std.parseInt(bits[0] if 0 < len(bits) else None), (Std.parseInt(bits[1] if 1 < len(bits) else None) - 1), Std.parseInt(bits[2] if 2 < len(bits) else None), 0, 0, 0)
        if size == 19:
            halves = s.split(" ")
            ymd = (halves[0] if 0 < len(halves) else None).split("-")
            hms = (halves[1] if 1 < len(halves) else None).split(":")
            return Date(Std.parseInt(ymd[0] if 0 < len(ymd) else None), (Std.parseInt(ymd[1] if 1 < len(ymd) else None) - 1), Std.parseInt(ymd[2] if 2 < len(ymd) else None), Std.parseInt(hms[0] if 0 < len(hms) else None), Std.parseInt(hms[1] if 1 < len(hms) else None), Std.parseInt(hms[2] if 2 < len(hms) else None))
        raise haxe_Exception.thrown("Invalid date format : " + ("null" if s is None else s))

    @staticmethod
    def _hx_empty_init(_hx_o):
        """Reset both stored datetimes to None (Haxe empty-constructor hook)."""
        _hx_o.date = None
        _hx_o.dateUTC = None
Date._hx_class = Date
_hx_classes["Date"] = Date
class EReg:
    """Haxe EReg regular-expression wrapper over Python's `re`.

    Only the replace/map subset used by this SDK is generated.  The Haxe
    'g' option is tracked separately in `_hx_global` since Python `re`
    expresses it via the sub/search call sites instead of a compile flag.
    """
    _hx_class_name = "EReg"
    _hx_is_interface = "False"
    __slots__ = ("pattern", "matchObj", "_hx_global")
    _hx_fields = ["pattern", "matchObj", "global"]
    _hx_methods = ["replace", "map"]
    def __init__(self,r,opt):
        # Option letters in `opt`: m/i/s/u map to re.M/I/S/U; 'g' marks the
        # regex as global (all matches instead of the first).
        self.matchObj = None
        self._hx_global = False
        options = 0
        _g = 0
        _g1 = len(opt)
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            c = (-1 if ((i >= len(opt))) else ord(opt[i]))
            if (c == 109):
                options = (options | python_lib_Re.M)
            if (c == 105):
                options = (options | python_lib_Re.I)
            if (c == 115):
                options = (options | python_lib_Re.S)
            if (c == 117):
                options = (options | python_lib_Re.U)
            if (c == 103):
                self._hx_global = True
        self.pattern = python_lib_Re.compile(r,options)
    def replace(self,s,by):
        # Haxe-style replace: "$N" in `by` is group N, "$$" a literal "$".
        # "$$" is first masked with a placeholder token, groups are spliced
        # in, then the placeholder is restored to "$".
        _this = by.split("$$")
        by = "_hx_#repl#__".join([python_Boot.toString1(x1,'') for x1 in _this])
        def _hx_local_0(x):
            res = by
            g = x.groups()
            _g = 0
            _g1 = len(g)
            while (_g < _g1):
                i = _g
                _g = (_g + 1)
                gs = g[i]
                if (gs is None):
                    continue
                delimiter = ("$" + HxOverrides.stringOrNull(str((i + 1))))
                _this = (list(res) if ((delimiter == "")) else res.split(delimiter))
                res = gs.join([python_Boot.toString1(x1,'') for x1 in _this])
            _this = res.split("_hx_#repl#__")
            res = "$".join([python_Boot.toString1(x1,'') for x1 in _this])
            return res
        replace = _hx_local_0
        # count=1 limits a non-global regex to the first match.
        return python_lib_Re.sub(self.pattern,replace,s,(0 if (self._hx_global) else 1))
    def map(self,s,f):
        # Replace each match (first match only when non-global) with f(self);
        # `self.matchObj` exposes the current match to the callback.
        buf_b = python_lib_io_StringIO()
        pos = 0
        right = s
        cur = self
        while (pos < len(s)):
            if (self.matchObj is None):
                self.matchObj = python_lib_Re.search(self.pattern,s)
            else:
                self.matchObj = self.matchObj.re.search(s,pos)
            if (self.matchObj is None):
                break
            pos1 = self.matchObj.end()
            curPos_pos = cur.matchObj.start()
            curPos_len = (cur.matchObj.end() - cur.matchObj.start())
            buf_b.write(Std.string(HxString.substr(HxString.substr(cur.matchObj.string,0,cur.matchObj.start()),pos,None)))
            buf_b.write(Std.string(f(cur)))
            right = HxString.substr(cur.matchObj.string,cur.matchObj.end(),None)
            if (not self._hx_global):
                buf_b.write(Std.string(right))
                return buf_b.getvalue()
            if (curPos_len == 0):
                # Zero-length match: emit the next character and advance one
                # position so the loop always makes progress.
                buf_b.write(Std.string(("" if (((pos1 < 0) or ((pos1 >= len(s))))) else s[pos1])))
                right = HxString.substr(right,1,None)
                pos = (pos1 + 1)
            else:
                pos = pos1
        buf_b.write(Std.string(right))
        return buf_b.getvalue()
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.pattern = None
        _hx_o.matchObj = None
        _hx_o._hx_global = None
EReg._hx_class = EReg
_hx_classes["EReg"] = EReg
class Lambda:
    """Haxe Lambda runtime support: collection helpers."""
    _hx_class_name = "Lambda"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["array"]

    @staticmethod
    def array(it):
        # Materialize a Haxe iterable (hasNext/next protocol) into a list.
        out = []
        itr = HxOverrides.iterator(it)
        while itr.hasNext():
            out.append(itr.next())
        return out
Lambda._hx_class = Lambda
_hx_classes["Lambda"] = Lambda
class _Math_Math_Impl_:
    """Haxe `Math` abstract-implementation shim."""
    _hx_class_name = "_Math.Math_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["random"]
    @staticmethod
    def random():
        # Uniform float in [0, 1).
        return python_lib_Random.random()
_Math_Math_Impl_._hx_class = _Math_Math_Impl_
_hx_classes["_Math.Math_Impl_"] = _Math_Math_Impl_
class Reflect:
    """Haxe Reflect: dynamic field access with Haxe→Python name mangling."""
    _hx_class_name = "Reflect"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["field", "setField", "getProperty", "callMethod", "isFunction"]
    @staticmethod
    def field(o,field):
        # Delegates to the Boot helper, which applies the same mangling rules.
        return python_Boot.field(o,field)
    @staticmethod
    def setField(o,field,value):
        # Python keywords and "__x"-style names (leading double underscore,
        # no trailing underscore) are stored with an "_hx_" prefix.
        setattr(o,(("_hx_" + field) if ((field in python_Boot.keywords)) else (("_hx_" + field) if (((((len(field) > 2) and ((ord(field[0]) == 95))) and ((ord(field[1]) == 95))) and ((ord(field[(len(field) - 1)]) != 95)))) else field)),value)
    @staticmethod
    def getProperty(o,field):
        # Property read honouring Haxe getters: prefers a callable
        # get_<field>() when present, except on anonymous objects.
        if (o is None):
            return None
        if (field in python_Boot.keywords):
            field = ("_hx_" + field)
        elif ((((len(field) > 2) and ((ord(field[0]) == 95))) and ((ord(field[1]) == 95))) and ((ord(field[(len(field) - 1)]) != 95))):
            field = ("_hx_" + field)
        if isinstance(o,_hx_AnonObject):
            return Reflect.field(o,field)
        tmp = Reflect.field(o,("get_" + ("null" if field is None else field)))
        if ((tmp is not None) and callable(tmp)):
            return tmp()
        else:
            return Reflect.field(o,field)
    @staticmethod
    def callMethod(o,func,args):
        # `o` is unused on Python; bound methods already carry their receiver.
        if callable(func):
            return func(*args)
        else:
            return None
    @staticmethod
    def isFunction(f):
        # Also accepts legacy objects exposing a func_code attribute.
        if (not ((python_lib_Inspect.isfunction(f) or python_lib_Inspect.ismethod(f)))):
            return python_Boot.hasField(f,"func_code")
        else:
            return True
Reflect._hx_class = Reflect
_hx_classes["Reflect"] = Reflect
class Std:
    """Haxe Std runtime: dynamic casts, type tests, string/number parsing."""
    _hx_class_name = "Std"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["downcast", "is", "isOfType", "string", "parseInt", "shortenPossibleNumber", "parseFloat"]
    @staticmethod
    def downcast(value,c):
        # Safe cast: returns `value` when it is an instance of class or
        # interface `c`, otherwise None.  Interface membership is tested by
        # walking value's class hierarchy and each class's _hx_interfaces.
        try:
            tmp = None
            if (not isinstance(value,c)):
                if c._hx_is_interface:
                    cls = c
                    loop = None
                    # Recursively search an interface list (and the
                    # interfaces those interfaces extend) for `cls`.
                    def _hx_local_1(intf):
                        f = (intf._hx_interfaces if (hasattr(intf,"_hx_interfaces")) else [])
                        if (f is not None):
                            _g = 0
                            while (_g < len(f)):
                                i = (f[_g] if _g >= 0 and _g < len(f) else None)
                                _g = (_g + 1)
                                if (i == cls):
                                    return True
                                else:
                                    l = loop(i)
                                    if l:
                                        return True
                            return False
                        else:
                            return False
                    loop = _hx_local_1
                    currentClass = value.__class__
                    result = False
                    while (currentClass is not None):
                        if loop(currentClass):
                            result = True
                            break
                        currentClass = python_Boot.getSuperClass(currentClass)
                    tmp = result
                else:
                    tmp = False
            else:
                tmp = True
            if tmp:
                return value
            else:
                return None
        except BaseException as _g:
            None
            # Any reflection failure is treated as "not an instance".
            return None
    @staticmethod
    def _hx_is(v,t):
        # Legacy alias ("is" is a Python keyword, hence the mangled name).
        return Std.isOfType(v,t)
    @staticmethod
    def isOfType(v,t):
        # Haxe Std.isOfType semantics, including: bool is not Int/Float,
        # a whole-valued float in int32 range counts as Int, and the special
        # Dynamic / Class / Enum type tokens.
        if ((v is None) and ((t is None))):
            return False
        if (t is None):
            return False
        if ((type(t) == type) and (t == Dynamic)):
            return (v is not None)
        isBool = isinstance(v,bool)
        if (((type(t) == type) and (t == Bool)) and isBool):
            return True
        if ((((not isBool) and (not ((type(t) == type) and (t == Bool)))) and ((type(t) == type) and (t == Int))) and isinstance(v,int)):
            return True
        vIsFloat = isinstance(v,float)
        tmp = None
        tmp1 = None
        # A finite float that round-trips through int() and fits in a signed
        # 32-bit range is considered an Int.
        if (((not isBool) and vIsFloat) and ((type(t) == type) and (t == Int))):
            f = v
            tmp1 = (((f != Math.POSITIVE_INFINITY) and ((f != Math.NEGATIVE_INFINITY))) and (not python_lib_Math.isnan(f)))
        else:
            tmp1 = False
        if tmp1:
            tmp1 = None
            try:
                tmp1 = int(v)
            except BaseException as _g:
                None
                tmp1 = None
            tmp = (v == tmp1)
        else:
            tmp = False
        if ((tmp and ((v <= 2147483647))) and ((v >= -2147483648))):
            return True
        if (((not isBool) and ((type(t) == type) and (t == Float))) and isinstance(v,(float, int))):
            return True
        if ((type(t) == type) and (t == str)):
            return isinstance(v,str)
        # Haxe enum classes are identified by their _hx_constructs list.
        isEnumType = ((type(t) == type) and (t == Enum))
        if ((isEnumType and python_lib_Inspect.isclass(v)) and hasattr(v,"_hx_constructs")):
            return True
        if isEnumType:
            return False
        isClassType = ((type(t) == type) and (t == Class))
        if ((((isClassType and (not isinstance(v,Enum))) and python_lib_Inspect.isclass(v)) and hasattr(v,"_hx_class_name")) and (not hasattr(v,"_hx_constructs"))):
            return True
        if isClassType:
            return False
        tmp = None
        try:
            tmp = isinstance(v,t)
        except BaseException as _g:
            None
            tmp = False
        if tmp:
            return True
        # Fall back to interface lookup, same walk as Std.downcast.
        if python_lib_Inspect.isclass(t):
            cls = t
            loop = None
            def _hx_local_1(intf):
                f = (intf._hx_interfaces if (hasattr(intf,"_hx_interfaces")) else [])
                if (f is not None):
                    _g = 0
                    while (_g < len(f)):
                        i = (f[_g] if _g >= 0 and _g < len(f) else None)
                        _g = (_g + 1)
                        if (i == cls):
                            return True
                        else:
                            l = loop(i)
                            if l:
                                return True
                    return False
                else:
                    return False
            loop = _hx_local_1
            currentClass = v.__class__
            result = False
            while (currentClass is not None):
                if loop(currentClass):
                    result = True
                    break
                currentClass = python_Boot.getSuperClass(currentClass)
            return result
        else:
            return False
    @staticmethod
    def string(s):
        # Haxe Std.string: "null" for None, Haxe-style rendering otherwise.
        return python_Boot.toString1(s,"")
    @staticmethod
    def parseInt(x):
        # Haxe Std.parseInt: try int() first; on failure scan manually,
        # skipping leading whitespace, honouring a single sign, a "0x"/"0X"
        # hex prefix, and stopping at the first invalid character.
        # Returns None when no digits are found.
        if (x is None):
            return None
        try:
            return int(x)
        except BaseException as _g:
            None
            base = 10
            _hx_len = len(x)
            foundCount = 0
            sign = 0
            firstDigitIndex = 0
            lastDigitIndex = -1
            previous = 0
            _g = 0
            _g1 = _hx_len
            while (_g < _g1):
                i = _g
                _g = (_g + 1)
                c = (-1 if ((i >= len(x))) else ord(x[i]))
                if (((c > 8) and ((c < 14))) or ((c == 32))):
                    # Whitespace is only allowed before the number starts.
                    if (foundCount > 0):
                        return None
                    continue
                else:
                    c1 = c
                    if (c1 == 43):
                        if (foundCount == 0):
                            sign = 1
                        elif (not (((48 <= c) and ((c <= 57))))):
                            if (not (((base == 16) and ((((97 <= c) and ((c <= 122))) or (((65 <= c) and ((c <= 90))))))))):
                                break
                    elif (c1 == 45):
                        if (foundCount == 0):
                            sign = -1
                        elif (not (((48 <= c) and ((c <= 57))))):
                            if (not (((base == 16) and ((((97 <= c) and ((c <= 122))) or (((65 <= c) and ((c <= 90))))))))):
                                break
                    elif (c1 == 48):
                        if (not (((foundCount == 0) or (((foundCount == 1) and ((sign != 0))))))):
                            if (not (((48 <= c) and ((c <= 57))))):
                                if (not (((base == 16) and ((((97 <= c) and ((c <= 122))) or (((65 <= c) and ((c <= 90))))))))):
                                    break
                    elif ((c1 == 120) or ((c1 == 88))):
                        # "0x"/"0X" directly after a leading zero switches to hex.
                        if ((previous == 48) and ((((foundCount == 1) and ((sign == 0))) or (((foundCount == 2) and ((sign != 0))))))):
                            base = 16
                        elif (not (((48 <= c) and ((c <= 57))))):
                            if (not (((base == 16) and ((((97 <= c) and ((c <= 122))) or (((65 <= c) and ((c <= 90))))))))):
                                break
                    elif (not (((48 <= c) and ((c <= 57))))):
                        if (not (((base == 16) and ((((97 <= c) and ((c <= 122))) or (((65 <= c) and ((c <= 90))))))))):
                            break
                    if (((foundCount == 0) and ((sign == 0))) or (((foundCount == 1) and ((sign != 0))))):
                        firstDigitIndex = i
                    foundCount = (foundCount + 1)
                    lastDigitIndex = i
                    previous = c
            if (firstDigitIndex <= lastDigitIndex):
                digits = HxString.substring(x,firstDigitIndex,(lastDigitIndex + 1))
                try:
                    return (((-1 if ((sign == -1)) else 1)) * int(digits,base))
                except BaseException as _g:
                    return None
            return None
    @staticmethod
    def shortenPossibleNumber(x):
        # Longest prefix of `x` made of digits and '.', used as a
        # parseFloat fallback.
        r = ""
        _g = 0
        _g1 = len(x)
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            c = ("" if (((i < 0) or ((i >= len(x))))) else x[i])
            _g2 = HxString.charCodeAt(c,0)
            if (_g2 is None):
                break
            else:
                _g3 = _g2
                if (((((((((((_g3 == 57) or ((_g3 == 56))) or ((_g3 == 55))) or ((_g3 == 54))) or ((_g3 == 53))) or ((_g3 == 52))) or ((_g3 == 51))) or ((_g3 == 50))) or ((_g3 == 49))) or ((_g3 == 48))) or ((_g3 == 46))):
                    r = (("null" if r is None else r) + ("null" if c is None else c))
                else:
                    break
        return r
    @staticmethod
    def parseFloat(x):
        # Haxe Std.parseFloat: float() first; on failure retry on the
        # numeric prefix; NaN when nothing parses.
        try:
            return float(x)
        except BaseException as _g:
            None
            if (x is not None):
                r1 = Std.shortenPossibleNumber(x)
                if (r1 != x):
                    return Std.parseFloat(r1)
            return Math.NaN
Std._hx_class = Std
_hx_classes["Std"] = Std
# Haxe basic-type marker classes; used only as type tokens (e.g. by
# Std.isOfType and Type.resolveEnum), never instantiated.
class Float: pass
class Int: pass
class Bool: pass
class Dynamic: pass
class StringBuf:
    """Haxe StringBuf backed by an in-memory text stream."""
    _hx_class_name = "StringBuf"
    _hx_is_interface = "False"
    __slots__ = ("b",)
    _hx_fields = ["b"]
    _hx_methods = ["get_length"]

    def __init__(self):
        # Underlying character buffer; generated callers write via self.b.
        self.b = python_lib_io_StringIO()

    def get_length(self):
        # Total characters written so far, without disturbing the stream
        # position (getvalue() does not move the cursor).
        return len(self.b.getvalue())

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.b = None
# Register StringBuf with the Haxe runtime class table (used by reflection).
StringBuf._hx_class = StringBuf
_hx_classes["StringBuf"] = StringBuf
class StringTools:
    """Haxe StringTools: whitespace trimming, padding, replace and hex."""
    _hx_class_name = "StringTools"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["isSpace", "ltrim", "rtrim", "trim", "lpad", "replace", "hex"]
    @staticmethod
    def isSpace(s,pos):
        # True when s[pos] is ASCII whitespace (codes 9-13 or 32).
        if (((len(s) == 0) or ((pos < 0))) or ((pos >= len(s)))):
            return False
        c = HxString.charCodeAt(s,pos)
        if (not (((c > 8) and ((c < 14))))):
            return (c == 32)
        else:
            return True
    @staticmethod
    def ltrim(s):
        # Strip leading whitespace (as defined by isSpace).
        l = len(s)
        r = 0
        while ((r < l) and StringTools.isSpace(s,r)):
            r = (r + 1)
        if (r > 0):
            return HxString.substr(s,r,(l - r))
        else:
            return s
    @staticmethod
    def rtrim(s):
        # Strip trailing whitespace (as defined by isSpace).
        l = len(s)
        r = 0
        while ((r < l) and StringTools.isSpace(s,((l - r) - 1))):
            r = (r + 1)
        if (r > 0):
            return HxString.substr(s,0,(l - r))
        else:
            return s
    @staticmethod
    def trim(s):
        return StringTools.ltrim(StringTools.rtrim(s))
    @staticmethod
    def lpad(s,c,l):
        # Left-pad `s` with repetitions of `c` until total length >= l.
        if (len(c) <= 0):
            return s
        buf = StringBuf()
        l = (l - len(s))
        while (buf.get_length() < l):
            s1 = Std.string(c)
            buf.b.write(s1)
        s1 = Std.string(s)
        buf.b.write(s1)
        return buf.b.getvalue()
    @staticmethod
    def replace(s,sub,by):
        # Replace every occurrence of `sub`; an empty `sub` splits into
        # characters, inserting `by` between each.
        _this = (list(s) if ((sub == "")) else s.split(sub))
        return by.join([python_Boot.toString1(x1,'') for x1 in _this])
    @staticmethod
    def hex(n,digits = None):
        # Uppercase hex of `n`, nibble by nibble using an unsigned (logical)
        # right shift; zero-padded on the left to `digits` when given.
        s = ""
        hexChars = "0123456789ABCDEF"
        while True:
            index = (n & 15)
            s = (HxOverrides.stringOrNull((("" if (((index < 0) or ((index >= len(hexChars))))) else hexChars[index]))) + ("null" if s is None else s))
            n = HxOverrides.rshift(n, 4)
            if (not ((n > 0))):
                break
        if ((digits is not None) and ((len(s) < digits))):
            diff = (digits - len(s))
            _g = 0
            _g1 = diff
            while (_g < _g1):
                _ = _g
                _g = (_g + 1)
                s = ("0" + ("null" if s is None else s))
        return s
StringTools._hx_class = StringTools
_hx_classes["StringTools"] = StringTools
class sys_FileSystem:
    """Haxe sys.FileSystem shim over os / os.path."""
    _hx_class_name = "sys.FileSystem"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["exists", "createDirectory", "deleteFile"]
    @staticmethod
    def exists(path):
        return python_lib_os_Path.exists(path)
    @staticmethod
    def createDirectory(path):
        # Recursive mkdir: mode 0o777 (511 decimal), exist_ok=True.
        python_lib_Os.makedirs(path,511,True)
    @staticmethod
    def deleteFile(path):
        python_lib_Os.remove(path)
sys_FileSystem._hx_class = sys_FileSystem
_hx_classes["sys.FileSystem"] = sys_FileSystem
class Sys:
    """Haxe Sys shim: process/system-level helpers used by the SDK."""
    _hx_class_name = "Sys"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["sleep", "systemName"]

    @staticmethod
    def sleep(seconds):
        # Block the current thread for `seconds` (may be fractional).
        python_lib_Time.sleep(seconds)

    @staticmethod
    def systemName():
        # Map sys.platform onto Haxe's canonical OS names; anything
        # unrecognized raises.
        platform = python_lib_Sys.platform
        if platform.startswith("linux"):
            return "Linux"
        if platform in ("win32", "cygwin"):
            return "Windows"
        if platform == "darwin":
            return "Mac"
        raise haxe_Exception.thrown("not supported platform")
Sys._hx_class = Sys
_hx_classes["Sys"] = Sys
class ValueType(Enum):
    """Type tags returned by Type.typeof.

    NOTE: `Enum` here is the Haxe runtime enum base class generated earlier
    in this file (tag, index, params), not Python's enum.Enum.
    """
    __slots__ = ()
    _hx_class_name = "ValueType"
    _hx_constructs = ["TNull", "TInt", "TFloat", "TBool", "TObject", "TFunction", "TClass", "TEnum", "TUnknown"]
    @staticmethod
    def TClass(c):
        # Parameterized constructor: index 6 carries the class.
        return ValueType("TClass", 6, (c,))
    @staticmethod
    def TEnum(e):
        # Parameterized constructor: index 7 carries the enum class.
        return ValueType("TEnum", 7, (e,))
# Parameterless constructors are shared singleton instances.
ValueType.TNull = ValueType("TNull", 0, ())
ValueType.TInt = ValueType("TInt", 1, ())
ValueType.TFloat = ValueType("TFloat", 2, ())
ValueType.TBool = ValueType("TBool", 3, ())
ValueType.TObject = ValueType("TObject", 4, ())
ValueType.TFunction = ValueType("TFunction", 5, ())
ValueType.TUnknown = ValueType("TUnknown", 8, ())
ValueType._hx_class = ValueType
_hx_classes["ValueType"] = ValueType
class Type:
    """Haxe reflection API over the generated classes and enums."""
    _hx_class_name = "Type"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["getClass", "getSuperClass", "getClassName", "getEnumName", "resolveClass", "resolveEnum", "createEmptyInstance", "createEnum", "getEnumConstructs", "typeof"]
    @staticmethod
    def getClass(o):
        # Class of an instance; None for None, for classes themselves, and
        # for anonymous objects.
        if (o is None):
            return None
        o1 = o
        if ((o1 is not None) and ((HxOverrides.eq(o1,str) or python_lib_Inspect.isclass(o1)))):
            return None
        if isinstance(o,_hx_AnonObject):
            return None
        if hasattr(o,"_hx_class"):
            return o._hx_class
        if hasattr(o,"__class__"):
            return o.__class__
        else:
            return None
    @staticmethod
    def getSuperClass(c):
        return python_Boot.getSuperClass(c)
    @staticmethod
    def getClassName(c):
        # Haxe-qualified class name; maps Python builtins to Haxe names.
        if hasattr(c,"_hx_class_name"):
            return c._hx_class_name
        else:
            if (c == list):
                return "Array"
            if (c == Math):
                return "Math"
            if (c == str):
                return "String"
            try:
                return c.__name__
            except BaseException as _g:
                None
                return None
    @staticmethod
    def getEnumName(e):
        return e._hx_class_name
    @staticmethod
    def resolveClass(name):
        # Look up a class by Haxe name in the generated class table; returns
        # None when the entry is missing or is not actually a class.
        if (name == "Array"):
            return list
        if (name == "Math"):
            return Math
        if (name == "String"):
            return str
        cl = _hx_classes.get(name,None)
        tmp = None
        if (cl is not None):
            o = cl
            tmp = (not (((o is not None) and ((HxOverrides.eq(o,str) or python_lib_Inspect.isclass(o))))))
        else:
            tmp = True
        if tmp:
            return None
        return cl
    @staticmethod
    def resolveEnum(name):
        # Enums are classes carrying a _hx_constructs list.
        if (name == "Bool"):
            return Bool
        o = Type.resolveClass(name)
        if hasattr(o,"_hx_constructs"):
            return o
        else:
            return None
    @staticmethod
    def createEmptyInstance(cl):
        # Allocate without calling __init__, then run each class's
        # _hx_empty_init from the root superclass down.
        i = cl.__new__(cl)
        callInit = None
        def _hx_local_0(cl):
            sc = Type.getSuperClass(cl)
            if (sc is not None):
                callInit(sc)
            if hasattr(cl,"_hx_empty_init"):
                cl._hx_empty_init(i)
        callInit = _hx_local_0
        callInit(cl)
        return i
    @staticmethod
    def createEnum(e,constr,params = None):
        # Instantiate enum constructor `constr` on enum class `e`;
        # parameterized constructors require `params`, plain ones forbid it.
        f = Reflect.field(e,constr)
        if (f is None):
            raise haxe_Exception.thrown(("No such constructor " + ("null" if constr is None else constr)))
        if Reflect.isFunction(f):
            if (params is None):
                raise haxe_Exception.thrown((("Constructor " + ("null" if constr is None else constr)) + " need parameters"))
            return Reflect.callMethod(e,f,params)
        if ((params is not None) and ((len(params) != 0))):
            raise haxe_Exception.thrown((("Constructor " + ("null" if constr is None else constr)) + " does not need parameters"))
        return f
    @staticmethod
    def getEnumConstructs(e):
        # Copy of the constructor-name list, or [] for non-enums.
        if hasattr(e,"_hx_constructs"):
            x = e._hx_constructs
            return list(x)
        else:
            return []
    @staticmethod
    def typeof(v):
        # Haxe runtime type tag for an arbitrary value (order matters:
        # bool before int, Haxe Enum instances before generic classes).
        if (v is None):
            return ValueType.TNull
        elif isinstance(v,bool):
            return ValueType.TBool
        elif isinstance(v,int):
            return ValueType.TInt
        elif isinstance(v,float):
            return ValueType.TFloat
        elif isinstance(v,str):
            return ValueType.TClass(str)
        elif isinstance(v,list):
            return ValueType.TClass(list)
        elif (isinstance(v,_hx_AnonObject) or python_lib_Inspect.isclass(v)):
            return ValueType.TObject
        elif isinstance(v,Enum):
            return ValueType.TEnum(v.__class__)
        elif (isinstance(v,type) or hasattr(v,"_hx_class")):
            return ValueType.TClass(v.__class__)
        elif callable(v):
            return ValueType.TFunction
        else:
            return ValueType.TUnknown
Type._hx_class = Type
_hx_classes["Type"] = Type
class apptimize_ABTApptimizeVariableType(Enum):
    """Dynamic-variable value types (Haxe runtime enum, all parameterless)."""
    __slots__ = ()
    _hx_class_name = "apptimize.ABTApptimizeVariableType"
    _hx_constructs = ["Invalid", "String", "Double", "Integer", "Boolean", "Array", "Dictionary"]
# Singleton instances, one per constructor, indexed per _hx_constructs.
apptimize_ABTApptimizeVariableType.Invalid = apptimize_ABTApptimizeVariableType("Invalid", 0, ())
apptimize_ABTApptimizeVariableType.String = apptimize_ABTApptimizeVariableType("String", 1, ())
apptimize_ABTApptimizeVariableType.Double = apptimize_ABTApptimizeVariableType("Double", 2, ())
apptimize_ABTApptimizeVariableType.Integer = apptimize_ABTApptimizeVariableType("Integer", 3, ())
apptimize_ABTApptimizeVariableType.Boolean = apptimize_ABTApptimizeVariableType("Boolean", 4, ())
apptimize_ABTApptimizeVariableType.Array = apptimize_ABTApptimizeVariableType("Array", 5, ())
apptimize_ABTApptimizeVariableType.Dictionary = apptimize_ABTApptimizeVariableType("Dictionary", 6, ())
apptimize_ABTApptimizeVariableType._hx_class = apptimize_ABTApptimizeVariableType
_hx_classes["apptimize.ABTApptimizeVariableType"] = apptimize_ABTApptimizeVariableType
class apptimize_ABTApptimizeVariable:
    """Resolves Apptimize dynamic-variable values from metadata alterations."""
    _hx_class_name = "apptimize.ABTApptimizeVariable"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["getValue", "apptimizeVariableTypeForString"]
    @staticmethod
    def getValue(params,name,_hx_type,nestedType = None):
        # Returns the altered value for variable `name` when a matching
        # value-alteration of the expected (nested) type exists and is not
        # marked "use default"; otherwise None.
        apptimize_ABTDataStore._checkForUpdatedMetadataIfNecessary()
        store = apptimize_ABTDataStore.sharedInstance()
        metadata = store.getMetaData(params.appkey)
        if (metadata is None):
            return None
        env = store.makeEnvironment(params)
        alterations = metadata.selectAlterationsIntoArray(env)
        _g = 0
        while (_g < len(alterations)):
            alteration = (alterations[_g] if _g >= 0 and _g < len(alterations) else None)
            _g = (_g + 1)
            if (alteration.getKey() == name):
                valueAlteration = Std.downcast(alteration,apptimize_models_ABTValueAlteration)
                if (valueAlteration is None):
                    apptimize_ABTLogger.v((("Alteration found for key \"" + HxOverrides.stringOrNull(alteration.getKey())) + "\" isn't a value alteration."),_hx_AnonObject({'fileName': "src/apptimize/ABTApptimizeVariable.hx", 'lineNumber': 41, 'className': "apptimize.ABTApptimizeVariable", 'methodName': "getValue"}))
                    return None
                alterationType = apptimize_ABTApptimizeVariable.apptimizeVariableTypeForString(valueAlteration.getType())
                alterationNestedType = None
                if (valueAlteration.getNestedType() is not None):
                    alterationNestedType = apptimize_ABTApptimizeVariable.apptimizeVariableTypeForString(valueAlteration.getNestedType())
                if ((alterationType == _hx_type) and ((alterationNestedType == nestedType))):
                    # Participation is recorded even when the alteration says
                    # to keep the default value.
                    variant = valueAlteration.getVariant()
                    apptimize_ABTDataStore.sharedInstance().incrementVariantRunCount(params,variant)
                    if (not valueAlteration.useDefaultValue()):
                        return valueAlteration.getValue()
        apptimize_ABTLogger.v((("No alteration found for dynamic variable \"" + ("null" if name is None else name)) + "\"."),_hx_AnonObject({'fileName': "src/apptimize/ABTApptimizeVariable.hx", 'lineNumber': 63, 'className': "apptimize.ABTApptimizeVariable", 'methodName': "getValue"}))
        return None
    @staticmethod
    def apptimizeVariableTypeForString(stringType):
        # Maps a metadata type string (case-insensitive) onto the enum;
        # unknown strings become Invalid.
        _hx_type = stringType.lower()
        if (_hx_type == "string"):
            return apptimize_ABTApptimizeVariableType.String
        elif (_hx_type == "double"):
            return apptimize_ABTApptimizeVariableType.Double
        elif (_hx_type == "int"):
            return apptimize_ABTApptimizeVariableType.Integer
        elif (_hx_type == "boolean"):
            return apptimize_ABTApptimizeVariableType.Boolean
        elif (_hx_type == "list"):
            return apptimize_ABTApptimizeVariableType.Array
        elif (_hx_type == "dictionary"):
            return apptimize_ABTApptimizeVariableType.Dictionary
        else:
            return apptimize_ABTApptimizeVariableType.Invalid
apptimize_ABTApptimizeVariable._hx_class = apptimize_ABTApptimizeVariable
_hx_classes["apptimize.ABTApptimizeVariable"] = apptimize_ABTApptimizeVariable
class apptimize_util_PlatformLock:
    """Haxe interface: a platform lock exposing acquire()/release()."""
    _hx_class_name = "apptimize.util.PlatformLock"
    _hx_is_interface = "True"
    __slots__ = ()
    _hx_methods = ["acquire", "release"]
apptimize_util_PlatformLock._hx_class = apptimize_util_PlatformLock
_hx_classes["apptimize.util.PlatformLock"] = apptimize_util_PlatformLock
class apptimize_util_PythonPlatformLock:
    """Reentrant-lock implementation of apptimize.util.PlatformLock."""
    _hx_class_name = "apptimize.util.PythonPlatformLock"
    _hx_is_interface = "False"
    __slots__ = ("_lock",)
    _hx_fields = ["_lock"]
    _hx_methods = ["init", "acquire", "release", "hxUnserialize"]
    _hx_interfaces = [apptimize_util_PlatformLock]

    def __init__(self):
        self._lock = None
        self.init()

    def init(self):
        # Allocate the underlying RLock; also used after unserialization.
        self._lock = python_lib_threading_RLock()

    def acquire(self):
        # Lazily recreate the lock if it is missing (e.g. restored instance).
        if self._lock is None:
            self._lock = python_lib_threading_RLock()
        return self._lock.acquire()

    def release(self):
        # Releasing a never-created lock is a no-op.
        if self._lock is not None:
            self._lock.release()

    def hxUnserialize(self, u):
        # Locks are not serializable; rebuild a fresh one on unserialize.
        self.init()

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._lock = None
apptimize_util_PythonPlatformLock._hx_class = apptimize_util_PythonPlatformLock
_hx_classes["apptimize.util.PythonPlatformLock"] = apptimize_util_PythonPlatformLock
class apptimize_util_ABTDataLock:
    """Factory for the SDK's named global locks.

    The SYSTEM_DATA_LOCK / METADATA_LOCK / CHECK_TIME_LOCK / INITIALIZATION
    statics are assigned elsewhere in the generated module.
    """
    _hx_class_name = "apptimize.util.ABTDataLock"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["SYSTEM_DATA_LOCK", "METADATA_LOCK", "CHECK_TIME_LOCK", "INITIALIZATION", "getNewLock"]
    @staticmethod
    def getNewLock(lockName):
        # `lockName` is unused on Python; every lock is a process-local RLock.
        return apptimize_util_PythonPlatformLock()
apptimize_util_ABTDataLock._hx_class = apptimize_util_ABTDataLock
_hx_classes["apptimize.util.ABTDataLock"] = apptimize_util_ABTDataLock
class apptimize_ABTDataStore:
_hx_class_name = "apptimize.ABTDataStore"
_hx_is_interface = "False"
__slots__ = ("resultLogs", "metaDataCache", "newMdFetcher", "propChangeFetcher", "resultDispatchQueue", "sdkParameters")
_hx_fields = ["resultLogs", "metaDataCache", "newMdFetcher", "propChangeFetcher", "resultDispatchQueue", "sdkParameters"]
_hx_methods = ["initialize", "hasMetadata", "_getCurrentEtag", "loadMetaData", "_resetSubmitTimeIfNeeded", "_getMinTimeBetweenPosts", "_submitResultsIfNecessary", "reloadFromDisk", "getRequestlessEnvironment", "getUrlsForMetadata", "onMetadataLoaded", "onMetadataReceived", "overrideMetadata", "getMetaData", "dispatchEnrollmentChangeCallbacks", "makeEnvironment", "writeToDiskIfNeeded", "_saveResultLogs", "addResultLogEntry", "_submitResultLog", "flushTracking", "_flushTrackingInternal", "incrementVariantRunCount", "generateEvent"]
_hx_statics = ["appKey", "serverGuid", "_instance", "resultsLock", "sharedInstance", "clear", "shutdown", "_getLastCheckTime", "_updateLastCheckTime", "_resetCheckTimeIfNeeded", "_getLastSubmitTime", "_updateLastSubmitTime", "getAppKey", "checkForUpdatedMetaData", "_checkForUpdatedMetadataIfNecessary", "getServerGUID", "shouldDisable"]
    def __init__(self):
        # Field setup only; persisted state is loaded in initialize() /
        # loadMetaData().
        self.propChangeFetcher = None
        self.newMdFetcher = None
        self.metaDataCache = None
        self.resultLogs = None
        self.sdkParameters = apptimize_models_ABTSdkParameters(None)
        # Background queue that serializes result-log submission work.
        self.resultDispatchQueue = apptimize_util_ABTDispatch("Results Logging Dispatch Queue")
        self.resultDispatchQueue.start()
        self.newMdFetcher = apptimize_api_ABTSecondaryValuesClient()
        self.propChangeFetcher = apptimize_api_ABTSecondaryValuesClient()
        self.metaDataCache = haxe_ds_StringMap()
    def initialize(self):
        # Restore persisted result logs, or create a fresh LRU cache sized by
        # the results_cache_size config property (falls back to 10 when the
        # property is missing or unparsable).
        self.resultLogs = apptimize_support_persistence_ABTPersistence.loadObject(apptimize_support_persistence_ABTPersistence.kResultLogsKey)
        if (self.resultLogs is None):
            results_cache_size = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.RESULTS_CACHE_SIZE_KEY)
            if (Type.getClass(results_cache_size) == str):
                results_cache_size = Std.parseInt(results_cache_size)
            if (results_cache_size is None):
                apptimize_ABTLogger.e("Invalid value specified for results_cache_size, defaulting to 10",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 101, 'className': "apptimize.ABTDataStore", 'methodName': "initialize"}))
                results_cache_size = 10
            self.resultLogs = apptimize_util_ABTLRUCache(results_cache_size)
    def hasMetadata(self,appKey):
        # True when metadata for `appKey` is in the in-memory cache.  The
        # metadata lock is released on both the success path and (before
        # re-raising) the failure path.
        hasMD = False
        apptimize_util_ABTDataLock.METADATA_LOCK.acquire()
        try:
            hasMD = (appKey in self.metaDataCache.h)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.METADATA_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.METADATA_LOCK.release()
        return hasMD
def _getCurrentEtag(self,appKey):
md = self.getMetaData(appKey)
if (md is None):
return None
return md.getEtag()
    def loadMetaData(self,appKey):
        # Load cached metadata from disk, then kick off a network refresh
        # unless the SDK is disabled or the check interval has not elapsed.
        apptimize_ABTDataStore.appKey = appKey
        self.reloadFromDisk()
        if (not apptimize_ABTDataStore.shouldDisable()):
            if apptimize_ABTDataStore._resetCheckTimeIfNeeded():
                apptimize_api_ABTApiClient.sharedInstance().downloadMetaDataForKey(appKey,self._getCurrentEtag(appKey))
    def _resetSubmitTimeIfNeeded(self,ignoreAppConfig = None):
        # Under CHECK_TIME_LOCK: if the minimum post interval has elapsed,
        # record "now" as the last submit time and return True (the caller
        # may then post results).  The lock is released on all paths.
        if (ignoreAppConfig is None):
            ignoreAppConfig = False
        resetClock = False
        apptimize_util_ABTDataLock.CHECK_TIME_LOCK.acquire()
        try:
            timeout = self._getMinTimeBetweenPosts(ignoreAppConfig)
            currentTime = (Date.now().date.timestamp() * 1000)
            timeSinceLastCheck = (currentTime - apptimize_ABTDataStore._getLastSubmitTime())
            if (timeSinceLastCheck > timeout):
                apptimize_ABTDataStore._updateLastSubmitTime(currentTime)
                resetClock = True
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.CHECK_TIME_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.CHECK_TIME_LOCK.release()
        return resetClock
def _getMinTimeBetweenPosts(self,ignoreAppConfig = None):
if (ignoreAppConfig is None):
ignoreAppConfig = False
timeout = Std.parseFloat(apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.RESULT_POST_DELAY_MS_KEY))
minTimeout = self.sdkParameters.minPostFrequencyMs
if ignoreAppConfig:
if (minTimeout is None):
return -1
else:
return minTimeout
else:
if ((minTimeout is not None) and ((timeout < minTimeout))):
return minTimeout
return timeout
    def _submitResultsIfNecessary(self):
        # Flush tracking only when the minimum post interval has elapsed.
        if self._resetSubmitTimeIfNeeded():
            self._flushTrackingInternal()
    def reloadFromDisk(self):
        # Load persisted metadata (if any) and feed it through the normal
        # loaded-metadata path so secondary values get fetched.
        metadata = apptimize_support_persistence_ABTPersistence.loadObject(apptimize_support_persistence_ABTPersistence.kMetadataKey)
        if (metadata is not None):
            apptimize_ABTLogger.v("Existing metadata loaded from storage, will update if necessary.",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 256, 'className': "apptimize.ABTDataStore", 'methodName': "reloadFromDisk"}))
            self.onMetadataLoaded(metadata)
        else:
            apptimize_ABTLogger.v("No existing metadata found in storage.",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 259, 'className': "apptimize.ABTDataStore", 'methodName': "reloadFromDisk"}))
def getRequestlessEnvironment(self,md):
anonUserId = "anon"
currentUserId = None
customProperties = None
params = apptimize_filter_ABTFilterEnvParams(currentUserId,anonUserId,customProperties,md.getAppKey(),apptimize_support_properties_ABTApplicationProperties.sharedInstance(),apptimize_support_properties_ABTInternalProperties.sharedInstance())
env = md.makeEnvironment(params,self.sdkParameters)
return env
def getUrlsForMetadata(self,md):
env = self.getRequestlessEnvironment(md)
urls = md.extractNeededSecondaryUrls(env)
return urls
    def onMetadataLoaded(self,md):
        # Disk-loaded metadata path: refresh SDK parameters, fetch the
        # secondary values the metadata needs, then install it (fromNetwork
        # = False).  If the needed URL set changed while fetching, retry.
        _gthis = self
        self.sdkParameters = md.extractSdkParameters(self.getRequestlessEnvironment(md))
        urls = self.getUrlsForMetadata(md)
        def _hx_local_0(values,fetched):
            currentUrls = _gthis.getUrlsForMetadata(md)
            if _gthis.newMdFetcher.needNewUrls(urls,currentUrls):
                apptimize_ABTLogger.w("urls changed while fetching values, retrying",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 309, 'className': "apptimize.ABTDataStore", 'methodName': "onMetadataLoaded"}))
                _gthis.onMetadataLoaded(md)
                return
            md.setSecondaryValues(values)
            _gthis.overrideMetadata(md,False,fetched)
        self.newMdFetcher.fetch(urls,md.getSecondaryValues(),_hx_local_0)
    def onMetadataReceived(self,md):
        # Network-received metadata path: like onMetadataLoaded but seeds the
        # fetch with the previously cached secondary values and installs with
        # fromNetwork = True.  Bails out if a fetch is already in progress.
        _gthis = self
        if self.newMdFetcher.fetching():
            apptimize_ABTLogger.e("onMetadataReceived called while fetch already in progress",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 330, 'className': "apptimize.ABTDataStore", 'methodName': "onMetadataReceived"}))
            return
        self.sdkParameters = md.extractSdkParameters(self.getRequestlessEnvironment(md))
        urls = self.getUrlsForMetadata(md)
        oldValues = None
        oldMD = self.getMetaData(md.getAppKey())
        if (oldMD is not None):
            oldValues = oldMD.getSecondaryValues()
        def _hx_local_0(values,fetched):
            currentUrls = _gthis.getUrlsForMetadata(md)
            if _gthis.newMdFetcher.needNewUrls(urls,currentUrls):
                apptimize_ABTLogger.w("urls changed while fetching values, retrying",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 346, 'className': "apptimize.ABTDataStore", 'methodName': "onMetadataReceived"}))
                _gthis.onMetadataReceived(md)
                return
            md.setSecondaryValues(values)
            _gthis.overrideMetadata(md,True,fetched)
        self.newMdFetcher.fetch(urls,oldValues,_hx_local_0)
def overrideMetadata(self,md,fromNetwork,secValsChanged):
    # Install `md` as the active metadata when it is newer (strictly higher
    # sequence number) than the cached copy, or when its secondary values
    # changed. `fromNetwork` additionally persists the disabled-version
    # list and gates the disk write below.
    # NOTE(review): original indentation was lost in extraction; the
    # nesting below was reconstructed from the branch semantics — confirm
    # against upstream generated output.
    oldMd = None
    isKeyValid = True
    mdUpdated = False
    apptimize_util_ABTDataLock.METADATA_LOCK.acquire()
    try:
        # Only metadata for the currently configured app key may be cached.
        isKeyValid = (md.getAppKey() == apptimize_ABTDataStore.getAppKey())
        key = md.getAppKey()
        oldMd = self.metaDataCache.h.get(key,None)
        if isKeyValid:
            if ((oldMd is None) or ((oldMd.getSequenceNumber() < md.getSequenceNumber()))):
                mdUpdated = True
            if (mdUpdated or secValsChanged):
                self.metaDataCache.h[key] = md
                self.sdkParameters = md.extractSdkParameters(self.getRequestlessEnvironment(md))
                if fromNetwork:
                    apptimize_support_persistence_ABTPersistence.saveObject(apptimize_support_persistence_ABTPersistence.kDisabledVersions,md.getDisabledVersions())
    except BaseException as _g:
        None
        # Haxe-generated finally-emulation: release the lock, re-raise.
        e = haxe_Exception.caught(_g).unwrap()
        apptimize_util_ABTDataLock.METADATA_LOCK.release()
        raise haxe_Exception.thrown(e)
    apptimize_util_ABTDataLock.METADATA_LOCK.release()
    # Carry values that must survive metadata replacement over from the
    # previous copy (oldMd may be None on first install).
    md.copyPersistentValues(oldMd)
    if ((mdUpdated and fromNetwork) or secValsChanged):
        self.writeToDiskIfNeeded()
    self.dispatchEnrollmentChangeCallbacks(oldMd,md)
    if mdUpdated:
        apptimize_events_ABTEventManager.dispatchOnMetadataUpdated()
        apptimize_ABTLogger.i((("Updated metadata for app key \"" + HxOverrides.stringOrNull(md.getAppKey())) + "\"."),_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 450, 'className': "apptimize.ABTDataStore", 'methodName': "overrideMetadata"}))
        apptimize_ABTLogger.v(("New metadata:\n" + HxOverrides.stringOrNull(haxe_format_JsonPrinter.print(md.getMetaData(),None," "))),_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 452, 'className': "apptimize.ABTDataStore", 'methodName': "overrideMetadata"}))
        # First metadata ever installed flips the SDK to "ready".
        if ((oldMd is None) and apptimize_ApptimizeInternal._trySetReady()):
            apptimize_events_ABTEventManager.dispatchOnApptimizeInitialized()
    else:
        apptimize_ABTLogger.i("Existing metadata is up-to-date.",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 460, 'className': "apptimize.ABTDataStore", 'methodName': "overrideMetadata"}))
def getMetaData(self,appKey):
    # Return the cached metadata for `appKey`, or None when the cache is
    # absent, has no entry, or the stored entry's own app key mismatches
    # (in which case an error is logged).
    if (self.metaDataCache is None):
        return None
    md = None
    apptimize_util_ABTDataLock.METADATA_LOCK.acquire()
    try:
        md = self.metaDataCache.h.get(appKey,None)
    except BaseException as _g:
        None
        # Haxe-generated finally-emulation: release the lock, re-raise.
        e = haxe_Exception.caught(_g).unwrap()
        apptimize_util_ABTDataLock.METADATA_LOCK.release()
        raise haxe_Exception.thrown(e)
    apptimize_util_ABTDataLock.METADATA_LOCK.release()
    # Sanity check: a cache entry should only ever be stored under its own key.
    if ((md is not None) and ((md.getAppKey() != appKey))):
        apptimize_ABTLogger.e("Metadata appkey does not match requested key",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 477, 'className': "apptimize.ABTDataStore", 'methodName': "getMetaData"}))
        return None
    return md
def dispatchEnrollmentChangeCallbacks(self,oldMetadata,newMetadata):
    # No-op here; presumably a hook for notifying listeners when a user's
    # experiment enrollment changes between metadata versions — confirm
    # whether another target/platform supplies a real implementation.
    pass
def makeEnvironment(self,params):
    """Build an evaluation environment for `params` from the cached
    metadata of params.appkey; None when no metadata is cached."""
    metadata = self.getMetaData(params.appkey)
    return None if metadata is None else metadata.makeEnvironment(params,self.sdkParameters)
def writeToDiskIfNeeded(self):
    """Persist the currently cached metadata for the active app key, if any."""
    current = self.getMetaData(apptimize_ABTDataStore.getAppKey())
    if current is None:
        return
    apptimize_support_persistence_ABTPersistence.saveObject(apptimize_support_persistence_ABTPersistence.kMetadataKey,current,None,True)
def _saveResultLogs(self):
    # Persist the in-memory result logs under the results key.
    apptimize_support_persistence_ABTPersistence.saveObject(apptimize_support_persistence_ABTPersistence.kResultLogsKey,self.resultLogs,None,True)
def addResultLogEntry(self,env,entry):
    # Append `entry` to the per-user result log (keyed by the environment's
    # unique user id), creating the log on first use. When a log exceeds
    # the configured maximum entry count it is removed for submission.
    # All log mutation happens under resultsLock.
    # NOTE(review): original indentation was lost in extraction; the save
    # and submit calls are placed at try-level (run on every add) — confirm
    # against upstream generated output.
    logKey = env.getUniqueUserID()
    apptimize_ABTDataStore.resultsLock.acquire()
    try:
        resultLog = self.resultLogs.getValue(logKey)
        if (resultLog is None):
            resultLog = apptimize_models_results_ABTResultLog(env)
            # insert() is handed the submit callback + dispatch queue,
            # presumably so eviction can trigger a post — confirm.
            self.resultLogs.insert(logKey,resultLog,self._submitResultLog,self.resultDispatchQueue)
        resultLog.logEntry(entry)
        if (resultLog.entryCount() > apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.MAXIMUM_RESULT_ENTRIES_KEY)):
            self.resultLogs.remove(logKey,self._submitResultLog,self.resultDispatchQueue)
        self._saveResultLogs()
        self._submitResultsIfNecessary()
    except BaseException as _g:
        None
        # Haxe-generated finally-emulation: release the lock, re-raise.
        e = haxe_Exception.caught(_g).unwrap()
        apptimize_ABTDataStore.resultsLock.release()
        raise haxe_Exception.thrown(e)
    apptimize_ABTDataStore.resultsLock.release()
def _submitResultLog(self,log):
    """Post one result log to the API client, then re-persist the logs.

    Skipped entirely when this SDK version has been remotely disabled.
    """
    if not apptimize_ABTDataStore.shouldDisable():
        apptimize_api_ABTApiClient.sharedInstance().postResultsForKey(log.getAppKey(),log)
        self._saveResultLogs()
def flushTracking(self):
    """Flush pending tracking results if the submit-time gate allows it.

    NOTE(review): the True argument presumably forces the submit-time
    reset — confirm against _resetSubmitTimeIfNeeded (defined elsewhere).
    """
    if not self._resetSubmitTimeIfNeeded(True):
        return
    self._flushTrackingInternal()
def _flushTrackingInternal(self):
    # Drain every pending result log through _submitResultLog (via the
    # result dispatch queue) and persist the now-empty log set, all under
    # resultsLock.
    apptimize_ABTLogger.v("Posting results...",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 732, 'className': "apptimize.ABTDataStore", 'methodName': "_flushTrackingInternal"}))
    apptimize_ABTDataStore.resultsLock.acquire()
    try:
        self.resultLogs.clear(self._submitResultLog,self.resultDispatchQueue)
        self._saveResultLogs()
    except BaseException as _g:
        None
        # Haxe-generated finally-emulation: release the lock, re-raise.
        e = haxe_Exception.caught(_g).unwrap()
        apptimize_ABTDataStore.resultsLock.release()
        raise haxe_Exception.thrown(e)
    apptimize_ABTDataStore.resultsLock.release()
def incrementVariantRunCount(self,params,variant):
    """Record a participation ("variant shown") event for `variant`.

    Hotfix variants are excluded. Builds the environment for `params`,
    logs a variant-shown result entry, and notifies participation
    listeners.
    """
    if (Type.getClass(variant) == apptimize_models_ABTHotfixVariant):
        return
    # Fix: dropped the unused variantStickyString/experimentStickyString
    # locals the generator emitted — they were computed from pure getters
    # and never referenced.
    phase = variant.getPhase()
    # NOTE(review): first-participation detection is not implemented here;
    # listeners are always told isFirstParticipation=False — confirm intent.
    isFirstParticipation = False
    env = self.makeEnvironment(params)
    variantShownEntry = apptimize_models_results_ABTResultEntryVariantShown(env,variant.getVariantID(),variant.getCycle(),phase)
    apptimize_ABTLogger.v((("Incrementing variant run count for variant ID \"" + Std.string(variant.getVariantID())) + "\"."),_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 767, 'className': "apptimize.ABTDataStore", 'methodName': "incrementVariantRunCount"}))
    variantInfo = apptimize_VariantInfo.initWithVariant(variant,env.userID,env.anonID)
    apptimize_events_ABTEventManager.dispatchOnParticipatedInExperiment(variantInfo,isFirstParticipation)
    self.addResultLogEntry(env,variantShownEntry)
def generateEvent(self,eventName,eventSource,eventAttributes,params):
    # Track a named event: refuse until the SDK is initialized and metadata
    # for params.appkey is available, otherwise build a result entry and
    # append it to the user's result log. The repeated
    # ("null" if x is None else x) guards are Haxe null-safe string concat.
    if ((not apptimize_Apptimize._isInitialized()) or ((apptimize_ABTDataStore.sharedInstance().getMetaData(params.appkey) is None))):
        apptimize_ABTLogger.w((("Event \"" + ("null" if eventName is None else eventName)) + "\" will not be tracked until Apptimize.setup() is called and MetaData available."),_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 778, 'className': "apptimize.ABTDataStore", 'methodName': "generateEvent"}))
        return
    env = self.makeEnvironment(params)
    eventEntry = apptimize_models_results_ABTResultEntryEvent(env,eventName,eventSource,eventAttributes)
    logString = (("Event \"" + ("null" if eventName is None else eventName)) + "\"")
    if (eventAttributes is not None):
        # Include the event's value attribute (if any) in the log line.
        logString = ((("null" if logString is None else logString) + " with value ") + Std.string(eventAttributes.h.get(apptimize_ApptimizeInternal.kABTValueEventKey,None)))
    logString = (("null" if logString is None else logString) + " tracked.")
    apptimize_ABTLogger.v(logString,_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 792, 'className': "apptimize.ABTDataStore", 'methodName': "generateEvent"}))
    self.addResultLogEntry(env,eventEntry)
# Active application key; cleared by shutdown() (see getAppKey()).
appKey = None
# Lazily generated server GUID; created on first getServerGUID() call.
serverGuid = None
# Process-wide singleton storage for sharedInstance().
_instance = None
@staticmethod
def sharedInstance():
    """Return the process-wide ABTDataStore singleton, creating it lazily."""
    instance = apptimize_ABTDataStore._instance
    if instance is None:
        instance = apptimize_ABTDataStore()
        apptimize_ABTDataStore._instance = instance
    return instance
@staticmethod
def clear():
    # Wipe all persisted SDK state, then drop the singleton so the next
    # sharedInstance() call builds a fresh store.
    apptimize_support_persistence_ABTPersistence.clear()
    apptimize_ABTDataStore._instance = None
@staticmethod
def shutdown():
    """Drop the singleton and forget the app key and server GUID."""
    for attr in ("_instance", "appKey", "serverGuid"):
        setattr(apptimize_ABTDataStore, attr, None)
@staticmethod
def _getLastCheckTime():
    """Return the persisted timestamp of the last metadata check, or the
    sentinel -10000.0 when none has been recorded."""
    stored = apptimize_support_persistence_ABTPersistence.loadString(apptimize_support_persistence_ABTPersistence.kMetadataLastCheckTimeKey)
    return -10000.0 if stored is None else Std.parseFloat(stored)
@staticmethod
def _updateLastCheckTime(checkTime):
    # Persist `checkTime` as the moment of the last metadata check.
    apptimize_support_persistence_ABTPersistence.saveString(apptimize_support_persistence_ABTPersistence.kMetadataLastCheckTimeKey,Std.string(checkTime))
@staticmethod
def _resetCheckTimeIfNeeded():
    # Return True (and record "now") when the metadata polling interval
    # has elapsed since the last recorded check; guarded by CHECK_TIME_LOCK.
    resetClock = False
    apptimize_util_ABTDataLock.CHECK_TIME_LOCK.acquire()
    try:
        timeout = Std.parseFloat(apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.METADATA_POLLING_INTERVAL_MS_KEY))
        # Current wall-clock time in milliseconds.
        currentTime = (Date.now().date.timestamp() * 1000)
        timeSinceLastCheck = (currentTime - apptimize_ABTDataStore._getLastCheckTime())
        if (timeSinceLastCheck > timeout):
            apptimize_ABTDataStore._updateLastCheckTime(currentTime)
            resetClock = True
    except BaseException as _g:
        None
        # Haxe-generated finally-emulation: release the lock, re-raise.
        e = haxe_Exception.caught(_g).unwrap()
        apptimize_util_ABTDataLock.CHECK_TIME_LOCK.release()
        raise haxe_Exception.thrown(e)
    apptimize_util_ABTDataLock.CHECK_TIME_LOCK.release()
    return resetClock
@staticmethod
def _getLastSubmitTime():
    """Return the persisted timestamp of the last result submission, or
    the sentinel -10000.0 when none has been recorded."""
    stored = apptimize_support_persistence_ABTPersistence.loadString(apptimize_support_persistence_ABTPersistence.kResultLastSubmitTimeKey)
    return -10000.0 if stored is None else Std.parseFloat(stored)
@staticmethod
def _updateLastSubmitTime(checkTime):
    # Persist `checkTime` as the moment of the last result submission.
    apptimize_support_persistence_ABTPersistence.saveString(apptimize_support_persistence_ABTPersistence.kResultLastSubmitTimeKey,Std.string(checkTime))
@staticmethod
def getAppKey():
    # Accessor for the class-level active app key (None after shutdown()).
    return apptimize_ABTDataStore.appKey
@staticmethod
def checkForUpdatedMetaData(checkImmediately = None):
    # Download fresh metadata when the polling interval elapsed, or
    # unconditionally when checkImmediately is truthy. The None default is
    # the Haxe optional-argument idiom (None -> False).
    if (checkImmediately is None):
        checkImmediately = False
    if apptimize_ABTDataStore.shouldDisable():
        apptimize_ABTLogger.w("This SDK version disabled; not checking for updated metadata",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 218, 'className': "apptimize.ABTDataStore", 'methodName': "checkForUpdatedMetaData"}))
        return
    apptimize_ABTLogger.v("Checking for updated metadata...",_hx_AnonObject({'fileName': "src/apptimize/ABTDataStore.hx", 'lineNumber': 221, 'className': "apptimize.ABTDataStore", 'methodName': "checkForUpdatedMetaData"}))
    # Note: _resetCheckTimeIfNeeded() runs (and may record a new check
    # time) before checkImmediately is consulted, because `or` evaluates
    # left-to-right.
    if (apptimize_Apptimize._isInitialized() and ((apptimize_ABTDataStore._resetCheckTimeIfNeeded() or checkImmediately))):
        apptimize_api_ABTApiClient.sharedInstance().downloadMetaDataForKey(apptimize_ABTDataStore.getAppKey(),apptimize_ABTDataStore.sharedInstance()._getCurrentEtag(apptimize_ABTDataStore.getAppKey()))
@staticmethod
def _checkForUpdatedMetadataIfNecessary():
    """Opportunistic refresh: check metadata when no poller is running and
    polling is enabled (interval >= 0), then submit pending results."""
    if not apptimize_api_ABTMetadataPoller.isPolling():
        pollingInterval = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.METADATA_POLLING_INTERVAL_MS_KEY)
        if pollingInterval >= 0:
            apptimize_ABTDataStore.checkForUpdatedMetaData()
    apptimize_ABTDataStore.sharedInstance()._submitResultsIfNecessary()
@staticmethod
def getServerGUID():
    """Return the cached server GUID, generating one on first use."""
    guid = apptimize_ABTDataStore.serverGuid
    if guid is None:
        guid = apptimize_api_ABTUserGuid.generateUserGuid()
        apptimize_ABTDataStore.serverGuid = guid
    return guid
@staticmethod
def shouldDisable():
    """True when the persisted disabled-version list contains this SDK's
    version string; False when no list has been persisted."""
    disabledVersions = apptimize_support_persistence_ABTPersistence.loadObject(apptimize_support_persistence_ABTPersistence.kDisabledVersions)
    if disabledVersions is None:
        return False
    sdkVersion = apptimize_Apptimize.getApptimizeSDKVersion()
    return python_internal_ArrayImpl.indexOf(disabledVersions,sdkVersion,None) > -1
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.resultLogs = None
_hx_o.metaDataCache = None
_hx_o.newMdFetcher = None
_hx_o.propChangeFetcher = None
_hx_o.resultDispatchQueue = None
_hx_o.sdkParameters = None
# Haxe runtime bookkeeping: attach the class object to itself and register
# it in the global class table under its dotted Haxe name.
apptimize_ABTDataStore._hx_class = apptimize_ABTDataStore
_hx_classes["apptimize.ABTDataStore"] = apptimize_ABTDataStore
class apptimize_ABTLogger:
    """Leveled logger for the SDK (Haxe-generated).

    Output goes through haxe_Log.trace when useTraceForLogging is set (and
    trace has not already been redirected to this class), otherwise it is
    printed directly with an "Apptimize: " prefix. The LOG_LEVEL_*
    constants and the logLevel/useTraceForLogging defaults are assigned to
    this class elsewhere in the generated module.
    """
    _hx_class_name = "apptimize.ABTLogger"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["LOG_LEVEL_VERBOSE", "LOG_LEVEL_DEBUG", "LOG_LEVEL_INFO", "LOG_LEVEL_WARN", "LOG_LEVEL_ERROR", "LOG_LEVEL_NONE", "logLevel", "useTraceForLogging", "logLevelFromString", "setLogLevel", "c", "e", "w", "i", "d", "v", "log_out", "redirectTraceStatements", "traceInternal", "trace"]
    @staticmethod
    def logLevelFromString(logLevel):
        # Map a "LOG_LEVEL_*" name (case-insensitive) to its numeric
        # constant; unrecognized names fall back to LOG_LEVEL_NONE.
        level = logLevel.upper()
        if (level == "LOG_LEVEL_VERBOSE"):
            return apptimize_ABTLogger.LOG_LEVEL_VERBOSE
        if (level == "LOG_LEVEL_DEBUG"):
            return apptimize_ABTLogger.LOG_LEVEL_DEBUG
        if (level == "LOG_LEVEL_INFO"):
            return apptimize_ABTLogger.LOG_LEVEL_INFO
        if (level == "LOG_LEVEL_WARN"):
            return apptimize_ABTLogger.LOG_LEVEL_WARN
        if (level == "LOG_LEVEL_ERROR"):
            return apptimize_ABTLogger.LOG_LEVEL_ERROR
        if (level == "LOG_LEVEL_NONE"):
            return apptimize_ABTLogger.LOG_LEVEL_NONE
        return apptimize_ABTLogger.LOG_LEVEL_NONE
    @staticmethod
    def setLogLevel(level):
        # Set the minimum level that will be emitted.
        apptimize_ABTLogger.logLevel = level
    @staticmethod
    def c(msg,infos = None):
        # Critical: always log, then raise via the SDK exception helper.
        apptimize_ABTLogger.log_out(msg,infos)
        apptimize_util_ABTException.throwException(msg)
    @staticmethod
    def e(msg,infos = None):
        # Error-level log.
        if (apptimize_ABTLogger.logLevel <= apptimize_ABTLogger.LOG_LEVEL_ERROR):
            apptimize_ABTLogger.log_out(msg,infos)
    @staticmethod
    def w(msg,infos = None):
        # Warning-level log.
        if (apptimize_ABTLogger.logLevel <= apptimize_ABTLogger.LOG_LEVEL_WARN):
            apptimize_ABTLogger.log_out(msg,infos)
    @staticmethod
    def i(msg,infos = None):
        # Info-level log.
        if (apptimize_ABTLogger.logLevel <= apptimize_ABTLogger.LOG_LEVEL_INFO):
            apptimize_ABTLogger.log_out(msg,infos)
    @staticmethod
    def d(msg,infos = None):
        # Debug-level log.
        if (apptimize_ABTLogger.logLevel <= apptimize_ABTLogger.LOG_LEVEL_DEBUG):
            apptimize_ABTLogger.log_out(msg,infos)
    @staticmethod
    def v(msg,infos = None):
        # Verbose-level log.
        if (apptimize_ABTLogger.logLevel <= apptimize_ABTLogger.LOG_LEVEL_VERBOSE):
            apptimize_ABTLogger.log_out(msg,infos)
    @staticmethod
    def log_out(msg,infos):
        # Route to haxe_Log.trace unless trace already points at this
        # class (the inequality guard prevents infinite recursion after
        # redirectTraceStatements()).
        if (apptimize_ABTLogger.useTraceForLogging and ((haxe_Log.trace != apptimize_ABTLogger.trace))):
            haxe_Log.trace(msg,infos)
            return
        apptimize_ABTLogger.traceInternal(("Apptimize: " + ("null" if msg is None else msg)),infos)
    @staticmethod
    def redirectTraceStatements():
        # Make haxe trace() calls flow through this logger.
        haxe_Log.trace = apptimize_ABTLogger.trace
    @staticmethod
    def traceInternal(_hx_str,infos = None):
        # Low-level print with a platform line ending; `infos` is ignored here.
        str1 = Std.string(_hx_str)
        python_Lib.printString((("" + ("null" if str1 is None else str1)) + HxOverrides.stringOrNull(python_Lib.lineEnd)))
    @staticmethod
    def trace(val,infos = None):
        # haxe_Log.trace replacement: funnel trace output in at verbose level.
        _hx_str = Std.string(val)
        apptimize_ABTLogger.v(_hx_str,infos)
# Haxe runtime bookkeeping: register the logger class in the global table.
apptimize_ABTLogger._hx_class = apptimize_ABTLogger
_hx_classes["apptimize.ABTLogger"] = apptimize_ABTLogger
class apptimize_ApptimizeInternal:
    """Internal SDK lifecycle and event plumbing (Haxe-generated).

    Drives the SDK state machine guarded by the INITIALIZATION lock:
    0 = uninitialized, 1 = setting up (see _setup), 2 = initialized,
    3 = ready (see _trySetReady / _isInitialized; shutdown returns to 0).
    Also routes track events and code-block lookups to the data store.

    NOTE(review): original indentation was lost in extraction; nesting in
    _setup and the other methods below was reconstructed from the Haxe
    generator's conventions — confirm against upstream generated output.
    """
    _hx_class_name = "apptimize.ApptimizeInternal"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["kABTEventSourceApptimize", "kABTValueEventKey", "_state", "_isInitialized", "setState", "_trySetReady", "_setup", "shutdown", "generateTrackEvent", "_getCodeBlockMethod", "_getVariants", "_getAlterations"]
    @staticmethod
    def _isInitialized():
        # True once setup completed (state 2) or the SDK is ready (state 3).
        result = False
        apptimize_util_ABTDataLock.INITIALIZATION.acquire()
        try:
            result = ((apptimize_ApptimizeInternal._state == 2) or ((apptimize_ApptimizeInternal._state == 3)))
        except BaseException as _g:
            None
            # Haxe-generated finally-emulation: release the lock, re-raise.
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.INITIALIZATION.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.INITIALIZATION.release()
        return result
    @staticmethod
    def setState(state):
        # Unconditionally set the SDK state under the INITIALIZATION lock.
        apptimize_util_ABTDataLock.INITIALIZATION.acquire()
        try:
            apptimize_ApptimizeInternal._state = state
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.INITIALIZATION.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.INITIALIZATION.release()
    @staticmethod
    def _trySetReady():
        # Atomically promote state 2 -> 3; True only for the caller that
        # performed the promotion (used to fire the initialized event once).
        result = False
        apptimize_util_ABTDataLock.INITIALIZATION.acquire()
        try:
            if (apptimize_ApptimizeInternal._state == 2):
                result = True
                apptimize_ApptimizeInternal._state = 3
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.INITIALIZATION.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.INITIALIZATION.release()
        return result
    @staticmethod
    def _setup(appKey,configAttributes,setupComplete):
        # One-time SDK initialization: claim the state machine (0 -> 1),
        # apply config attributes, select regional endpoints, then start
        # the data store and result-posting machinery.
        canInitialize = True
        apptimize_util_ABTDataLock.INITIALIZATION.acquire()
        try:
            if (apptimize_ApptimizeInternal._state != 0):
                apptimize_ABTLogger.w("Apptimize setup has already been called.",_hx_AnonObject({'fileName': "src/apptimize/ApptimizeInternal.hx", 'lineNumber': 68, 'className': "apptimize.ApptimizeInternal", 'methodName': "_setup"}))
                canInitialize = False
            else:
                apptimize_ApptimizeInternal._state = 1
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.INITIALIZATION.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.INITIALIZATION.release()
        if (canInitialize == False):
            return
        apptimize_ABTLogger.redirectTraceStatements()
        configProps = apptimize_support_properties_ABTConfigProperties.sharedInstance()
        if (configAttributes is not None):
            configProps.setProperties(apptimize_util_ABTUtilDictionary.nativeObjectToStringMap(configAttributes))
        if configProps.isPropertyAvailable(apptimize_support_properties_ABTConfigProperties.LOG_LEVEL_KEY):
            logLevel = configProps.valueForProperty(apptimize_support_properties_ABTConfigProperties.LOG_LEVEL_KEY)
            apptimize_ABTLogger.setLogLevel(apptimize_ABTLogger.logLevelFromString(logLevel))
        if (configProps.isPropertyAvailable(apptimize_support_properties_ABTConfigProperties.APPTIMIZE_ENVIRONMENT_KEY) or configProps.isPropertyAvailable(apptimize_support_properties_ABTConfigProperties.APPTIMIZE_REGION_KEY)):
            # Haxe-generated switch over len(region) / len(environment);
            # each arm installs the metadata + groups endpoints for that
            # region/environment pair. "prod" in the default region keeps
            # the built-in endpoints; non-gcp/eucs regions have no staging
            # and fall back to the default staging endpoints with a log.
            environment = configProps.valueForProperty(apptimize_support_properties_ABTConfigProperties.APPTIMIZE_ENVIRONMENT_KEY)
            if (environment is None):
                environment = "prod"
            region = configProps.valueForProperty(apptimize_support_properties_ABTConfigProperties.APPTIMIZE_REGION_KEY)
            if (region is None):
                region = "default"
            region1 = region
            _hx_local_0 = len(region1)
            if (_hx_local_0 == 4):
                if (region1 == "eucs"):
                    environment1 = environment
                    _hx_local_1 = len(environment1)
                    if (_hx_local_1 == 5):
                        if (environment1 == "local"):
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY,"https://local.apptimize.co/api/metadata/v4/")
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://local.apptimize.co")
                        else:
                            pass
                    elif (_hx_local_1 == 4):
                        if (environment1 == "prod"):
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_LL_KEY,"https://md-ll.apptimize.eu/api/metadata/v4/")
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_HL_KEY,"https://md-hl.apptimize.eu/api/metadata/v4/")
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://mapi.apptimize.eu")
                        else:
                            pass
                    elif (_hx_local_1 == 7):
                        if (environment1 == "staging"):
                            if (region != "default"):
                                apptimize_ABTLogger.v((("Apptimize region '" + ("null" if region is None else region)) + "' does not have a staging environment. Falling back to default region."),_hx_AnonObject({'fileName': "src/apptimize/ApptimizeInternal.hx", 'lineNumber': 109, 'className': "apptimize.ApptimizeInternal", 'methodName': "_setup"}))
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY,"https://staging-md.apptimize.co/api/metadata/v4/")
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://staging-mapi.apptimize.co")
                        else:
                            pass
                    else:
                        pass
                else:
                    environment1 = environment
                    _hx_local_2 = len(environment1)
                    if (_hx_local_2 == 5):
                        if (environment1 == "local"):
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY,"https://local.apptimize.co/api/metadata/v4/")
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://local.apptimize.co")
                        else:
                            pass
                    elif (_hx_local_2 == 7):
                        if (environment1 == "staging"):
                            if (region != "default"):
                                apptimize_ABTLogger.v((("Apptimize region '" + ("null" if region is None else region)) + "' does not have a staging environment. Falling back to default region."),_hx_AnonObject({'fileName': "src/apptimize/ApptimizeInternal.hx", 'lineNumber': 109, 'className': "apptimize.ApptimizeInternal", 'methodName': "_setup"}))
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY,"https://staging-md.apptimize.co/api/metadata/v4/")
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://staging-mapi.apptimize.co")
                        else:
                            pass
                    else:
                        pass
            elif (_hx_local_0 == 3):
                if (region1 == "gcp"):
                    environment1 = environment
                    _hx_local_3 = len(environment1)
                    if (_hx_local_3 == 5):
                        if (environment1 == "local"):
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY,"https://local.apptimize.co/api/metadata/v4/")
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://local.apptimize.co")
                        else:
                            pass
                    elif (_hx_local_3 == 7):
                        if (environment1 == "staging"):
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY,"https://gcp-stag-md.apptimize.co/api/metadata/v4/")
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://gcp-stag-mapi.apptimize.co")
                        else:
                            pass
                    else:
                        pass
                else:
                    environment1 = environment
                    _hx_local_4 = len(environment1)
                    if (_hx_local_4 == 5):
                        if (environment1 == "local"):
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY,"https://local.apptimize.co/api/metadata/v4/")
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://local.apptimize.co")
                        else:
                            pass
                    elif (_hx_local_4 == 7):
                        if (environment1 == "staging"):
                            if (region != "default"):
                                apptimize_ABTLogger.v((("Apptimize region '" + ("null" if region is None else region)) + "' does not have a staging environment. Falling back to default region."),_hx_AnonObject({'fileName': "src/apptimize/ApptimizeInternal.hx", 'lineNumber': 109, 'className': "apptimize.ApptimizeInternal", 'methodName': "_setup"}))
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY,"https://staging-md.apptimize.co/api/metadata/v4/")
                            configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://staging-mapi.apptimize.co")
                        else:
                            pass
                    else:
                        pass
            else:
                environment1 = environment
                _hx_local_5 = len(environment1)
                if (_hx_local_5 == 5):
                    if (environment1 == "local"):
                        configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY,"https://local.apptimize.co/api/metadata/v4/")
                        configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://local.apptimize.co")
                    else:
                        pass
                elif (_hx_local_5 == 7):
                    if (environment1 == "staging"):
                        if (region != "default"):
                            apptimize_ABTLogger.v((("Apptimize region '" + ("null" if region is None else region)) + "' does not have a staging environment. Falling back to default region."),_hx_AnonObject({'fileName': "src/apptimize/ApptimizeInternal.hx", 'lineNumber': 109, 'className': "apptimize.ApptimizeInternal", 'methodName': "_setup"}))
                        configProps.setProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY,"https://staging-md.apptimize.co/api/metadata/v4/")
                        configProps.setProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY,"https://staging-mapi.apptimize.co")
                    else:
                        pass
                else:
                    pass
        if (appKey is not None):
            apptimize_ABTLogger.v((("Initializing Apptimize with app key: \"" + ("null" if appKey is None else appKey)) + "\"."),_hx_AnonObject({'fileName': "src/apptimize/ApptimizeInternal.hx", 'lineNumber': 120, 'className': "apptimize.ApptimizeInternal", 'methodName': "_setup"}))
        apptimize_ABTDataStore.sharedInstance().initialize()
        apptimize_support_initialize_ABTPlatformInitialize.initialize()
        # Default result-post thread pool size; configurable via properties.
        threadCount = 8
        if configProps.isPropertyAvailable(apptimize_support_properties_ABTConfigProperties.RESULT_POST_THREAD_POOL_SIZE_KEY):
            threadCount = configProps.valueForProperty(apptimize_support_properties_ABTConfigProperties.RESULT_POST_THREAD_POOL_SIZE_KEY)
        def _hx_local_6():
            # High-latency-load completion: restore pending logs, signal
            # setup completion, then start the result dispatch threads.
            # NOTE(review): startDispatching placed inside this callback
            # (closure defs immediately precede their use in generated
            # code) — confirm against upstream.
            apptimize_api_ABTApiResultsPost.loadPendingLogs()
            setupComplete()
            apptimize_api_ABTApiResultsPost.startDispatching(threadCount)
        apptimize_support_persistence_ABTPersistence.loadFromHighLatency(_hx_local_6)
    @staticmethod
    def shutdown():
        # Tear down the SDK when initialized/ready: persist pending state,
        # shut down subsystems, and return the state machine to 0.
        apptimize_util_ABTDataLock.INITIALIZATION.acquire()
        try:
            if ((apptimize_ApptimizeInternal._state == 2) or ((apptimize_ApptimizeInternal._state == 3))):
                apptimize_api_ABTApiResultsPost.savePendingLogs()
                apptimize_support_persistence_ABTPersistence.saveToHighLatency()
                apptimize_support_initialize_ABTPlatformInitialize.shutdownPlatform()
                apptimize_ABTDataStore.shutdown()
                apptimize_support_persistence_ABTPersistence.shutdown()
                apptimize_ApptimizeInternal._state = 0
                apptimize_ABTLogger.i("Apptimize has shutdown",_hx_AnonObject({'fileName': "src/apptimize/ApptimizeInternal.hx", 'lineNumber': 151, 'className': "apptimize.ApptimizeInternal", 'methodName': "shutdown"}))
            else:
                apptimize_ABTLogger.w("Apptimize is not initialized. Unable to shutdown().",_hx_AnonObject({'fileName': "src/apptimize/ApptimizeInternal.hx", 'lineNumber': 153, 'className': "apptimize.ApptimizeInternal", 'methodName': "shutdown"}))
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.INITIALIZATION.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.INITIALIZATION.release()
    @staticmethod
    def generateTrackEvent(envParams,eventName,eventValue):
        # Track a named event, wrapping an optional numeric value into the
        # attributes map under kABTValueEventKey.
        value = None
        if (eventValue is not None):
            _g = haxe_ds_StringMap()
            _g.h[apptimize_ApptimizeInternal.kABTValueEventKey] = eventValue
            value = _g
        apptimize_ABTDataStore.sharedInstance().generateEvent(eventName,apptimize_ApptimizeInternal.kABTEventSourceApptimize,value,envParams)
    @staticmethod
    def _getCodeBlockMethod(envParams,codeBlockVariableName):
        # Resolve which code-block method to run: scan current block
        # alterations for one whose variant matches the requested name,
        # record participation, and return its method name; otherwise
        # "baseline".
        cbVarName = codeBlockVariableName
        if ((cbVarName is None) or ((cbVarName == ""))):
            apptimize_ABTLogger.w("Attempting to runCodeBlock() without specifying a code block name! Returning baseline original variant.",_hx_AnonObject({'fileName': "src/apptimize/ApptimizeInternal.hx", 'lineNumber': 170, 'className': "apptimize.ApptimizeInternal", 'methodName': "_getCodeBlockMethod"}))
        else:
            _g = 0
            _g1 = apptimize_ApptimizeInternal._getAlterations(envParams)
            while (_g < len(_g1)):
                alteration = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                if ((alteration is not None) and ((Type.getClass(alteration) == apptimize_models_ABTBlockAlteration))):
                    block = alteration
                    variant = block.getVariant()
                    if (variant.getCodeBlockName() != cbVarName):
                        continue
                    apptimize_ABTDataStore.sharedInstance().incrementVariantRunCount(envParams,block.getVariant())
                    return block.methodName
        apptimize_ABTLogger.w((("Not participating in any code block experiments with name \"" + ("null" if codeBlockVariableName is None else codeBlockVariableName)) + "\". Returning baseline original variant."),_hx_AnonObject({'fileName': "src/apptimize/ApptimizeInternal.hx", 'lineNumber': 185, 'className': "apptimize.ApptimizeInternal", 'methodName': "_getCodeBlockMethod"}))
        return "baseline"
    @staticmethod
    def _getVariants(envParams):
        # Collect the distinct non-hotfix variants (by variant id) from the
        # current alterations, refreshing metadata opportunistically first.
        apptimize_ABTDataStore._checkForUpdatedMetadataIfNecessary()
        alterations = apptimize_ApptimizeInternal._getAlterations(envParams)
        variants = haxe_ds_IntMap()
        _g = 0
        while (_g < len(alterations)):
            alteration = (alterations[_g] if _g >= 0 and _g < len(alterations) else None)
            _g = (_g + 1)
            variant = alteration.getVariant()
            if ((Type.getClass(variant) != apptimize_models_ABTHotfixVariant) and (not (variant.getVariantID() in variants.h))):
                variants.set(variant.getVariantID(),variant)
        return Lambda.array(variants)
    @staticmethod
    def _getAlterations(envParams):
        # Select the alterations that apply to `envParams` from the cached
        # metadata; empty list when no metadata is available.
        alterations = []
        store = apptimize_ABTDataStore.sharedInstance()
        metadata = store.getMetaData(envParams.appkey)
        if (metadata is not None):
            alterations = metadata.selectAlterationsIntoArray(store.makeEnvironment(envParams))
        return alterations
# Haxe runtime bookkeeping: register the class in the global table.
apptimize_ApptimizeInternal._hx_class = apptimize_ApptimizeInternal
_hx_classes["apptimize.ApptimizeInternal"] = apptimize_ApptimizeInternal
class apptimize_ApptimizeExperimentType(Enum):
    # Haxe enum of experiment kinds. `Enum` here is the Haxe runtime base
    # class (instances are built with (tag, index, params) below), not
    # Python's stdlib enum.Enum.
    __slots__ = ()
    _hx_class_name = "apptimize.ApptimizeExperimentType"
    _hx_constructs = ["CodeBlock", "FeatureFlag", "DynamicVariables", "Visual", "Unknown", "FeatureVariables"]
# Enum value construction: (tag, index, parameters); the index order must
# match _hx_constructs on the class.
apptimize_ApptimizeExperimentType.CodeBlock = apptimize_ApptimizeExperimentType("CodeBlock", 0, ())
apptimize_ApptimizeExperimentType.FeatureFlag = apptimize_ApptimizeExperimentType("FeatureFlag", 1, ())
apptimize_ApptimizeExperimentType.DynamicVariables = apptimize_ApptimizeExperimentType("DynamicVariables", 2, ())
apptimize_ApptimizeExperimentType.Visual = apptimize_ApptimizeExperimentType("Visual", 3, ())
apptimize_ApptimizeExperimentType.Unknown = apptimize_ApptimizeExperimentType("Unknown", 4, ())
apptimize_ApptimizeExperimentType.FeatureVariables = apptimize_ApptimizeExperimentType("FeatureVariables", 5, ())
# Haxe runtime bookkeeping: register the enum class in the global table.
apptimize_ApptimizeExperimentType._hx_class = apptimize_ApptimizeExperimentType
_hx_classes["apptimize.ApptimizeExperimentType"] = apptimize_ApptimizeExperimentType
class apptimize_api_ABTApiClient:
    """Thin API facade (Haxe-generated): downloads metadata and queues
    result posts. Accessed as a process-wide singleton via sharedInstance().
    """
    _hx_class_name = "apptimize.api.ABTApiClient"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_methods = ["downloadMetaDataForKey", "postResultsForKey"]
    _hx_statics = ["_instance", "sharedInstance"]
    def __init__(self):
        pass
    def downloadMetaDataForKey(self,appKey,etag):
        # Kick off an async metadata request; on success parse the JSON,
        # stamp the response etag, and hand the metadata to the data store.
        def _hx_local_0(json,etag):
            md = apptimize_models_ABTMetadata.loadFromString(json)
            md.setEtag(etag)
            apptimize_ABTDataStore.sharedInstance().onMetadataReceived(md)
        def _hx_local_1(error):
            apptimize_ABTLogger.e(("Failed to download metadata with error: " + ("null" if error is None else error)),_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiClient.hx", 'lineNumber': 24, 'className': "apptimize.api.ABTApiClient", 'methodName': "downloadMetaDataForKey"}))
        # The request object issues itself on construction; the local
        # binding is unused afterwards.
        mdRequest = apptimize_api_ABTApiMetadataRequest(self,appKey,etag,_hx_local_0,_hx_local_1)
    def postResultsForKey(self,appKey,log):
        # Queue a result post; success/failure callbacks are intentionally
        # no-ops (delivery is fire-and-forget here).
        def _hx_local_0(response):
            pass
        def _hx_local_1(response):
            pass
        resultsRequest = apptimize_api_ABTApiResultsPost(self,appKey,log,_hx_local_0,_hx_local_1)
        apptimize_api_ABTApiResultsPost.pushRequest(resultsRequest)
    # Singleton storage for sharedInstance().
    _instance = None
    @staticmethod
    def sharedInstance():
        # Lazily create and return the process-wide client singleton.
        if (apptimize_api_ABTApiClient._instance is None):
            apptimize_api_ABTApiClient._instance = apptimize_api_ABTApiClient()
        return apptimize_api_ABTApiClient._instance
    @staticmethod
    def _hx_empty_init(_hx_o): pass
# Haxe runtime bookkeeping: register the class in the global table.
apptimize_api_ABTApiClient._hx_class = apptimize_api_ABTApiClient
_hx_classes["apptimize.api.ABTApiClient"] = apptimize_api_ABTApiClient
class apptimize_api_ABTApiMetadataRequest:
    # A single metadata GET request: builds the URL for the app key, issues
    # the HTTP request with conditional (etag) headers from __init__, and
    # forwards the outcome to the success/failure callbacks.
    _hx_class_name = "apptimize.api.ABTApiMetadataRequest"
    _hx_is_interface = "False"
    __slots__ = ("apiClient", "appKey", "successCallback", "failureCallback")
    _hx_fields = ["apiClient", "appKey", "successCallback", "failureCallback"]
    _hx_methods = ["_apiSuccess", "_getMetadataUrl", "_headersForRequest"]
def __init__(self,client,applicationKey,etag,success,failure):
    # Build and immediately issue the metadata GET request; the HTTP
    # response is routed to _apiSuccess / failureCallback.
    self.failureCallback = None
    self.successCallback = None
    self.appKey = None
    self.apiClient = None
    _gthis = self
    self.apiClient = client
    self.appKey = applicationKey
    self.successCallback = success
    self.failureCallback = failure
    # Final URL is base metadata URL + app key (Haxe null-safe concat).
    url = self._getMetadataUrl()
    url = (("null" if url is None else url) + ("null" if applicationKey is None else applicationKey))
    apptimize_ABTLogger.v(("Checking for new metadata from url: " + ("null" if url is None else url)),_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiMetadataRequest.hx", 'lineNumber': 27, 'className': "apptimize.api.ABTApiMetadataRequest", 'methodName': "new"}))
    def _hx_local_1(response):
        _gthis._apiSuccess(response)
    def _hx_local_2(response):
        _gthis.failureCallback(response.text)
    apptimize_http_ABTHttpRequest.get(url,self._headersForRequest(etag),_hx_local_1,_hx_local_2)
def _apiSuccess(self,response):
if (response.responseCode == 304):
apptimize_ABTLogger.v("Got HTTP response 304, metadata not updated.",_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiMetadataRequest.hx", 'lineNumber': 38, 'className': "apptimize.api.ABTApiMetadataRequest", 'methodName': "_apiSuccess"}))
return
json = apptimize_api_ABTMetadataProcessor.jsonFromMetadataDownload(response.bytes)
if (json is not None):
apptimize_ABTLogger.v("Request for metadata completed successfully.",_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiMetadataRequest.hx", 'lineNumber': 45, 'className': "apptimize.api.ABTApiMetadataRequest", 'methodName': "_apiSuccess"}))
self.successCallback(json,response.etag)
else:
errorString = "Failed to download metadata with error: response was empty."
apptimize_ABTLogger.w(errorString,_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiMetadataRequest.hx", 'lineNumber': 49, 'className': "apptimize.api.ABTApiMetadataRequest", 'methodName': "_apiSuccess"}))
self.failureCallback(errorString)
def _getMetadataUrl(self):
metadataUrl = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY)
if (metadataUrl is not None):
return metadataUrl
elif apptimize_ABTDataStore.sharedInstance().hasMetadata(self.appKey):
return apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_HL_KEY)
else:
return apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.META_DATA_URL_LL_KEY)
def _headersForRequest(self,etag):
if (etag is not None):
_g = haxe_ds_StringMap()
_g.h["ETag"] = etag
_g.h["If-None-Match"] = etag
return _g
else:
return None
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.apiClient = None
_hx_o.appKey = None
_hx_o.successCallback = None
_hx_o.failureCallback = None
apptimize_api_ABTApiMetadataRequest._hx_class = apptimize_api_ABTApiMetadataRequest
_hx_classes["apptimize.api.ABTApiMetadataRequest"] = apptimize_api_ABTApiMetadataRequest
class hx_concurrent_atomic__AtomicInt_AtomicIntImpl:
    """Integer whose reads and writes are serialized through a reentrant lock."""
    _hx_class_name = "hx.concurrent.atomic._AtomicInt.AtomicIntImpl"
    _hx_is_interface = "False"
    __slots__ = ("lock", "_value")
    _hx_fields = ["lock", "_value"]
    _hx_methods = ["get_value", "set_value", "getAndIncrement", "incrementAndGet"]
    def __init__(self,initialValue = None):
        # A missing initial value defaults to zero.
        if initialValue is None:
            initialValue = 0
        self.lock = hx_concurrent_lock_RLock()
        self._value = initialValue
    def get_value(self):
        # Read the current value while holding the lock.
        self.lock.acquire()
        current = self._value
        self.lock.release()
        return current
    def set_value(self,val):
        # Replace the value while holding the lock; returns the value written.
        self.lock.acquire()
        self._value = val
        self.lock.release()
        return val
    def getAndIncrement(self,amount = None):
        # Add `amount` (default 1) and return the value *before* the addition.
        if amount is None:
            amount = 1
        self.lock.acquire()
        previous = self._value
        self._value = previous + amount
        self.lock.release()
        return previous
    def incrementAndGet(self,amount = None):
        # Add `amount` (default 1) and return the value *after* the addition.
        if amount is None:
            amount = 1
        self.lock.acquire()
        self._value = self._value + amount
        updated = self._value
        self.lock.release()
        return updated
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.lock = None
        _hx_o._value = None
# Haxe runtime registration: attach the class object to itself and record it
# in the global class table under its dotted Haxe name.
hx_concurrent_atomic__AtomicInt_AtomicIntImpl._hx_class = hx_concurrent_atomic__AtomicInt_AtomicIntImpl
_hx_classes["hx.concurrent.atomic._AtomicInt.AtomicIntImpl"] = hx_concurrent_atomic__AtomicInt_AtomicIntImpl
class haxe_IMap:
    """Marker interface for Haxe map types (generated metadata only, no methods)."""
    _hx_class_name = "haxe.IMap"
    _hx_is_interface = "True"
    __slots__ = ()
haxe_IMap._hx_class = haxe_IMap
_hx_classes["haxe.IMap"] = haxe_IMap
class haxe_ds_StringMap:
    """String-keyed map backed by a plain Python dict (field `h`)."""
    _hx_class_name = "haxe.ds.StringMap"
    _hx_is_interface = "False"
    __slots__ = ("h",)
    _hx_fields = ["h"]
    _hx_methods = ["remove", "keys", "copy"]
    _hx_interfaces = [haxe_IMap]
    def __init__(self):
        self.h = dict()
    def remove(self,key):
        # Delete `key` if present; report whether it was present.
        if key not in self.h:
            return False
        del self.h[key]
        return True
    def keys(self):
        # Haxe-style iterator over the map's keys.
        return python_HaxeIterator(iter(self.h.keys()))
    def copy(self):
        # Shallow copy: duplicate the backing dict entries into a new map.
        duplicate = haxe_ds_StringMap()
        for k in self.h:
            duplicate.h[k] = self.h.get(k,None)
        return duplicate
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.h = None
haxe_ds_StringMap._hx_class = haxe_ds_StringMap
_hx_classes["haxe.ds.StringMap"] = haxe_ds_StringMap
class hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedList_Impl_:
    """Abstract-type facade: static factory for SynchronizedLinkedListImpl."""
    _hx_class_name = "hx.concurrent.collection._SynchronizedLinkedList.SynchronizedLinkedList_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["_new"]
    @staticmethod
    def _new(initialValues = None):
        # Create a new synchronized list, seeding it from `initialValues`
        # when one is supplied.
        impl = hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedListImpl()
        if initialValues is not None:
            impl.addAll(initialValues)
        return impl
hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedList_Impl_._hx_class = hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedList_Impl_
_hx_classes["hx.concurrent.collection._SynchronizedLinkedList.SynchronizedLinkedList_Impl_"] = hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedList_Impl_
class hx_concurrent_collection_Collection:
    """Interface for iterable collections (generated metadata; declares iterator())."""
    _hx_class_name = "hx.concurrent.collection.Collection"
    _hx_is_interface = "True"
    __slots__ = ()
    _hx_methods = ["iterator"]
hx_concurrent_collection_Collection._hx_class = hx_concurrent_collection_Collection
_hx_classes["hx.concurrent.collection.Collection"] = hx_concurrent_collection_Collection
class hx_concurrent_collection_OrderedCollection:
    """Interface extending Collection for ordered collections (generated metadata)."""
    _hx_class_name = "hx.concurrent.collection.OrderedCollection"
    _hx_is_interface = "True"
    __slots__ = ()
    _hx_interfaces = [hx_concurrent_collection_Collection]
hx_concurrent_collection_OrderedCollection._hx_class = hx_concurrent_collection_OrderedCollection
_hx_classes["hx.concurrent.collection.OrderedCollection"] = hx_concurrent_collection_OrderedCollection
class hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedListImpl:
    """Linked list whose operations execute while holding a reentrant lock.

    Haxe-generated: each operation wraps its body in nested _hx_local_*
    closures handed to self._sync.execute(), which runs the closure under
    the lock and returns its result.
    """
    _hx_class_name = "hx.concurrent.collection._SynchronizedLinkedList.SynchronizedLinkedListImpl"
    _hx_is_interface = "False"
    __slots__ = ("_items", "_sync")
    _hx_fields = ["_items", "_sync"]
    _hx_methods = ["get_length", "add", "addAll", "clear", "remove", "indexOf", "iterator", "toArray"]
    _hx_interfaces = [hx_concurrent_collection_OrderedCollection]
    def __init__(self):
        self._sync = hx_concurrent_lock_RLock()
        self._items = haxe_ds_List()
    def get_length(self):
        # Count elements by walking the node chain (h -> next) under the lock.
        _gthis = self
        def _hx_local_2():
            def _hx_local_1():
                _hx_len = 0
                _g_head = _gthis._items.h
                while (_g_head is not None):
                    val = _g_head.item
                    _g_head = _g_head.next
                    item = val
                    _hx_len = (_hx_len + 1)
                return _hx_len
            return self._sync.execute(_hx_local_1)
        return _hx_local_2()
    def add(self,item):
        # Append a single item under the lock.
        _gthis = self
        def _hx_local_0():
            _gthis._items.add(item)
        self._sync.execute(_hx_local_0)
    def addAll(self,coll):
        # Append every element of `coll` under the lock. `coll` is a Haxe
        # enum value dispatched on its constructor index:
        #   0 -> an object exposing iterator(), 1 -> a Python list,
        #   2 -> a haxe_ds_List walked via its node chain.
        _gthis = self
        def _hx_local_1():
            _g = coll
            tmp = _g.index
            if (tmp == 0):
                coll1 = _g.params[0]
                i = coll1.iterator()
                while i.hasNext():
                    i1 = i.next()
                    _gthis._items.add(i1)
            elif (tmp == 1):
                arr = _g.params[0]
                _g1 = 0
                while (_g1 < len(arr)):
                    i = (arr[_g1] if _g1 >= 0 and _g1 < len(arr) else None)
                    _g1 = (_g1 + 1)
                    _gthis._items.add(i)
            elif (tmp == 2):
                _hx_list = _g.params[0]
                _g_head = _hx_list.h
                while (_g_head is not None):
                    val = _g_head.item
                    _g_head = _g_head.next
                    i = val
                    _gthis._items.add(i)
            else:
                pass
        self._sync.execute(_hx_local_1)
    def clear(self):
        # Swap in a fresh empty list under the lock.
        _gthis = self
        def _hx_local_2():
            def _hx_local_1():
                def _hx_local_0():
                    _gthis._items = haxe_ds_List()
                    return _gthis._items
                return _hx_local_0()
            return _hx_local_1()
        self._sync.execute(_hx_local_2)
    def remove(self,x):
        # Remove the first occurrence of `x`; False when absent. indexOf is
        # called while the same reentrant lock is already held.
        _gthis = self
        def _hx_local_1():
            def _hx_local_0():
                if (_gthis.indexOf(x) == -1):
                    return False
                return _gthis._items.remove(x)
            return self._sync.execute(_hx_local_0)
        return _hx_local_1()
    def indexOf(self,x,startAt = None):
        # Linear scan starting at index `startAt` (default 0); -1 when not
        # found. Equality uses HxOverrides.eq (Haxe equality semantics).
        if (startAt is None):
            startAt = 0
        _gthis = self
        def _hx_local_2():
            def _hx_local_1():
                i = 0
                _g_head = _gthis._items.h
                while (_g_head is not None):
                    val = _g_head.item
                    _g_head = _g_head.next
                    item = val
                    if ((i >= startAt) and (HxOverrides.eq(item,x))):
                        return i
                    i = (i + 1)
                return -1
            return self._sync.execute(_hx_local_1)
        return _hx_local_2()
    def iterator(self):
        # Iterator over the node chain captured under the lock.
        # NOTE(review): the iterator keeps live node references, so mutations
        # after it is created may still be observed — confirm callers expect that.
        _gthis = self
        def _hx_local_1():
            def _hx_local_0():
                return haxe_ds__List_ListIterator(_gthis._items.h)
            return self._sync.execute(_hx_local_0)
        return _hx_local_1()
    def toArray(self):
        # Copy the items into a fresh Python list under the lock.
        _gthis = self
        def _hx_local_1():
            def _hx_local_0():
                arr = list()
                _g_head = _gthis._items.h
                while (_g_head is not None):
                    val = _g_head.item
                    _g_head = _g_head.next
                    item = val
                    arr.append(item)
                return arr
            return self._sync.execute(_hx_local_0)
        return _hx_local_1()
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._items = None
        _hx_o._sync = None
hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedListImpl._hx_class = hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedListImpl
_hx_classes["hx.concurrent.collection._SynchronizedLinkedList.SynchronizedLinkedListImpl"] = hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedListImpl
class apptimize_util_ABTDispatch:
    """Named task dispatcher backed by an hx.concurrent executor.

    Tasks dispatched before start() is called are buffered in `delayedTasks`
    with their requested delay, then replayed onto the executor once start()
    creates it.
    """
    _hx_class_name = "apptimize.util.ABTDispatch"
    _hx_is_interface = "False"
    __slots__ = ("executor", "delayedTasks", "name")
    _hx_fields = ["executor", "delayedTasks", "name"]
    _hx_methods = ["dispatch", "start"]
    _hx_statics = ["dispatchImmediately"]
    def __init__(self,name):
        self.delayedTasks = hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedList_Impl_._new()
        self.executor = None
        self.name = name
    def dispatch(self,task,delay = None):
        # Schedule `task` to run once after `delay` ms (default 0). Before
        # start() the task is only queued; submission errors are logged and
        # swallowed.
        if (delay is None):
            delay = 0
        if (self.executor is None):
            self.delayedTasks.add(apptimize_util_ABTDispatchTask(task,delay))
            return
        try:
            this1 = hx_concurrent_internal__Either2__Either2.a(task)
            self.executor.submit(this1,hx_concurrent_executor_Schedule.ONCE(delay))
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_ABTLogger.e(((("Dispatcher '" + HxOverrides.stringOrNull(self.name)) + "' threw an exception submitting a task: ") + Std.string(e)),_hx_AnonObject({'fileName': "src/apptimize/util/ABTDispatch.hx", 'lineNumber': 56, 'className': "apptimize.util.ABTDispatch", 'methodName': "dispatch"}))
    def start(self,maxThreads = None):
        # Create the executor (minimum 1 thread; invalid counts are clamped
        # with a warning) and replay any buffered tasks. Each task's delay is
        # recomputed from its original target timestamp; submission errors
        # are collected and logged after the replay loop.
        if (maxThreads is None):
            maxThreads = 1
        if (self.executor is not None):
            return
        if (maxThreads < 1):
            apptimize_ABTLogger.w((((("Invalid thread count of " + Std.string(maxThreads)) + ". Starting dispatcher '") + HxOverrides.stringOrNull(self.name)) + "' with minimum thread pool size of 1."),_hx_AnonObject({'fileName': "src/apptimize/util/ABTDispatch.hx", 'lineNumber': 76, 'className': "apptimize.util.ABTDispatch", 'methodName': "start"}))
            maxThreads = 1
        self.executor = hx_concurrent_executor_Executor.create(maxThreads)
        errors = haxe_ds_List()
        task = self.delayedTasks.iterator()
        while task.hasNext():
            task1 = task.next()
            try:
                # Remaining delay in ms: target timestamp minus "now".
                b = (task1.startTimestampMs - ((Date.now().date.timestamp() * 1000)))
                # Generated clamp: max(0, b) with NaN guards, then checked to
                # be an Int (or None) before use as the schedule delay.
                def _hx_local_1():
                    _hx_local_0 = (0 if (python_lib_Math.isnan(0)) else (b if (python_lib_Math.isnan(b)) else max(0,b)))
                    if (Std.isOfType(_hx_local_0,Int) or ((_hx_local_0 is None))):
                        _hx_local_0
                    else:
                        raise "Class cast error"
                    return _hx_local_0
                delay = _hx_local_1()
                this1 = hx_concurrent_internal__Either2__Either2.a(task1.task)
                self.executor.submit(this1,hx_concurrent_executor_Schedule.ONCE(delay))
            except BaseException as _g:
                None
                e = haxe_Exception.caught(_g).unwrap()
                errors.add(e)
        self.delayedTasks.clear()
        if (not errors.isEmpty()):
            apptimize_ABTLogger.e((("One or more errors occurred starting dispatcher '" + HxOverrides.stringOrNull(self.name)) + "':"),_hx_AnonObject({'fileName': "src/apptimize/util/ABTDispatch.hx", 'lineNumber': 97, 'className': "apptimize.util.ABTDispatch", 'methodName': "start"}))
            _g_head = errors.h
            while (_g_head is not None):
                val = _g_head.item
                _g_head = _g_head.next
                error = val
                apptimize_ABTLogger.e(((("Dispatcher '" + HxOverrides.stringOrNull(self.name)) + "' threw an exception submitting a task: ") + ("null" if error is None else error)),_hx_AnonObject({'fileName': "src/apptimize/util/ABTDispatch.hx", 'lineNumber': 99, 'className': "apptimize.util.ABTDispatch", 'methodName': "start"}))
    @staticmethod
    def dispatchImmediately(task):
        # Fallback used when no dispatcher exists: run the task inline.
        task()
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.executor = None
        _hx_o.delayedTasks = None
        _hx_o.name = None
apptimize_util_ABTDispatch._hx_class = apptimize_util_ABTDispatch
_hx_classes["apptimize.util.ABTDispatch"] = apptimize_util_ABTDispatch
class apptimize_api_ABTApiResultsPost:
    """Haxe-generated results-upload request with a static pending queue.

    Instances hold one serialized results payload. Class-level state tracks
    all pending posts (`_pendingResults`), a per-user pending count
    (`_pendingMap`), an exponential-backoff delay, and an optional dispatcher.
    The repeated acquire/try/except/release pattern around PENDING_LOCK is the
    transpiler's emulation of try/finally.
    """
    _hx_class_name = "apptimize.api.ABTApiResultsPost"
    _hx_is_interface = "False"
    __slots__ = ("_apiClient", "_appKey", "_uniqueID", "_requestBytes", "_success", "_failure", "_failureCount", "_url")
    _hx_fields = ["_apiClient", "_appKey", "_uniqueID", "_requestBytes", "_success", "_failure", "_failureCount", "_url"]
    _hx_methods = ["_getUrl", "_post", "incrementFailureCountForCode", "hxSerialize", "hxUnserialize"]
    _hx_statics = ["MAX_FAILURE_DELAY_MS", "DEFAULT_FAILURE_DELAY_MS", "_failureDelayMs", "_pendingMap", "_pendingResults", "_postDispatch", "_loadedPending", "PENDING_LOCK", "_getPendingResultsCount", "onSuccess", "postNextRequestForID", "decrementPendingCount", "incrementFailureDelay", "clearFailureDelay", "onFailure", "pushRequest", "savePendingLogs", "loadPendingLogs", "startDispatching"]
    def __init__(self,client,applicationKey,log,success,failure):
        # Serialize the log to JSON bytes up front; the URL may still be None
        # here and is resolved lazily in _post() once metadata is available.
        self._url = None
        self._requestBytes = None
        self._failureCount = 0
        self._apiClient = client
        self._appKey = applicationKey
        self._success = success
        self._failure = failure
        self._uniqueID = log.getUniqueUserKey()
        self._url = self._getUrl(applicationKey)
        jsonBytes = haxe_io_Bytes.ofString(log.toJSON())
        self._requestBytes = jsonBytes
    def _getUrl(self,appKey):
        # Pick a random checkin URL from the app's metadata and append the
        # API endpoint path; None when metadata or URLs are unavailable.
        metadata = apptimize_ABTDataStore.sharedInstance().getMetaData(appKey)
        if (metadata is None):
            return None
        urls = metadata.getCheckinUrls()
        if ((urls is None) or ((len(urls) < 1))):
            return None
        index = Math.floor(((((len(urls) - 1) + 1)) * python_lib_Random.random()))
        endpoint = "server/v4/"
        return (HxOverrides.stringOrNull((urls[index] if index >= 0 and index < len(urls) else None)) + ("null" if endpoint is None else endpoint))
    def _post(self):
        # Perform the HTTP POST. Requires the SDK to be initialized and a
        # resolvable URL; outcomes are routed to the static onSuccess /
        # onFailure handlers which manage the pending queue and retries.
        _gthis = self
        if (not apptimize_ApptimizeInternal._isInitialized()):
            return
        if (self._url is None):
            self._url = self._getUrl(self._appKey)
        if (self._url is None):
            apptimize_ABTLogger.e("Unable to post results until metadata is available.",_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiResultsPost.hx", 'lineNumber': 155, 'className': "apptimize.api.ABTApiResultsPost", 'methodName': "_post"}))
            apptimize_api_ABTApiResultsPost.onFailure(self,None)
            return
        apptimize_ABTLogger.v(("Posting results to: " + HxOverrides.stringOrNull(self._url)),_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiResultsPost.hx", 'lineNumber': 161, 'className': "apptimize.api.ABTApiResultsPost", 'methodName': "_post"}))
        def _hx_local_0(response):
            apptimize_ABTLogger.d("Successfully posted results.",_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiResultsPost.hx", 'lineNumber': 171, 'className': "apptimize.api.ABTApiResultsPost", 'methodName': "_post"}))
            apptimize_ABTLogger.d(("Results JSON:\n" + Std.string(_gthis._requestBytes)),_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiResultsPost.hx", 'lineNumber': 173, 'className': "apptimize.api.ABTApiResultsPost", 'methodName': "_post"}))
            apptimize_api_ABTApiResultsPost.onSuccess(_gthis,response)
        def _hx_local_1(response):
            _gthis.incrementFailureCountForCode(response.responseCode)
            apptimize_ABTLogger.e(((("Failed to post results, queuing for retry later: " + Std.string(response.responseCode)) + " ") + HxOverrides.stringOrNull(response.text)),_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiResultsPost.hx", 'lineNumber': 180, 'className': "apptimize.api.ABTApiResultsPost", 'methodName': "_post"}))
            apptimize_ABTLogger.e(("Results JSON:\n" + Std.string(_gthis._requestBytes)),_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiResultsPost.hx", 'lineNumber': 182, 'className': "apptimize.api.ABTApiResultsPost", 'methodName': "_post"}))
            apptimize_api_ABTApiResultsPost.onFailure(_gthis,response)
        apptimize_http_ABTHttpRequest.post(self._url,self._requestBytes,self._appKey,_hx_local_0,_hx_local_1)
    def incrementFailureCountForCode(self,status):
        # Only 4xx/5xx responses count toward the drop threshold.
        if (status >= 400):
            _hx_local_0 = self
            _hx_local_1 = _hx_local_0._failureCount
            _hx_local_0._failureCount = (_hx_local_1 + 1)
            _hx_local_1
    def hxSerialize(self,s):
        # Haxe serializer hook: persist everything needed to retry the post
        # after a restart (callbacks and client are intentionally dropped).
        s.serialize(self._appKey)
        s.serialize(self._requestBytes)
        s.serialize(self._failureCount)
        s.serialize(self._url)
        s.serialize(self._uniqueID)
    def hxUnserialize(self,u):
        # Haxe deserializer hook: field order must mirror hxSerialize.
        self._appKey = u.unserialize()
        self._requestBytes = u.unserialize()
        self._failureCount = u.unserialize()
        self._url = u.unserialize()
        self._uniqueID = u.unserialize()
    @staticmethod
    def _getPendingResultsCount():
        # Length of the pending queue, read under PENDING_LOCK.
        count = 0
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.acquire()
        try:
            count = apptimize_api_ABTApiResultsPost._pendingResults.get_length()
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
        return count
    @staticmethod
    def onSuccess(request,response):
        # A successful post resets the backoff, removes the request from the
        # queue, and — if the same user still has pending posts — kicks off
        # the next one. Finally the per-request success callback fires.
        apptimize_api_ABTApiResultsPost.clearFailureDelay()
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.acquire()
        try:
            apptimize_api_ABTApiResultsPost._pendingResults.remove(request)
            count = apptimize_api_ABTApiResultsPost.decrementPendingCount(request._uniqueID)
            if (count > 0):
                apptimize_api_ABTApiResultsPost.postNextRequestForID(request._uniqueID)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
        if ((request is not None) and ((request._success is not None))):
            request._success(response)
    @staticmethod
    def postNextRequestForID(id):
        # Find the first pending request for this user id and dispatch its
        # _post (through the dispatcher when one exists, else inline). The
        # single-element list wrapper is the transpiler's closure-capture idiom.
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.acquire()
        try:
            request = apptimize_api_ABTApiResultsPost._pendingResults.iterator()
            while request.hasNext():
                request1 = [request.next()]
                if ((request1[0] if 0 < len(request1) else None)._uniqueID == id):
                    def _hx_local_1(request):
                        def _hx_local_0():
                            (request[0] if 0 < len(request) else None)._post()
                        return _hx_local_0
                    task = _hx_local_1(request1)
                    if (apptimize_api_ABTApiResultsPost._postDispatch is not None):
                        apptimize_api_ABTApiResultsPost._postDispatch.dispatch(task,0)
                    else:
                        apptimize_util_ABTDispatch.dispatchImmediately(task)
                    break
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
    @staticmethod
    def decrementPendingCount(id):
        # Decrement the per-user pending count, dropping the entry at zero.
        # NOTE(review): if `id` is absent from _pendingMap, get() yields None
        # and (None - 1) raises; callers appear to only pass ids registered
        # via pushRequest — confirm.
        count = 0
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.acquire()
        try:
            count = apptimize_api_ABTApiResultsPost._pendingMap.h.get(id,None)
            count = (count - 1)
            if (count == 0):
                apptimize_api_ABTApiResultsPost._pendingMap.remove(id)
            else:
                apptimize_api_ABTApiResultsPost._pendingMap.h[id] = count
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
        return count
    @staticmethod
    def incrementFailureDelay():
        # Exponential backoff: double the delay, capped at MAX_FAILURE_DELAY_MS.
        delay = (apptimize_api_ABTApiResultsPost._failureDelayMs.get_value() * 2)
        if (delay > apptimize_api_ABTApiResultsPost.MAX_FAILURE_DELAY_MS):
            delay = apptimize_api_ABTApiResultsPost.MAX_FAILURE_DELAY_MS
        apptimize_api_ABTApiResultsPost._failureDelayMs.set_value(delay)
        return apptimize_api_ABTApiResultsPost._failureDelayMs.get_value()
    @staticmethod
    def onFailure(request,response):
        # After too many failures the post is dropped (and the next pending
        # one for the same user is started); otherwise it is re-dispatched
        # with the increased backoff delay. The failure callback always fires.
        if (request._failureCount > apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.MAXIMUM_RESULT_POST_FAILURE_KEY)):
            apptimize_ABTLogger.e("Dropping result post after repeated failure.",_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiResultsPost.hx", 'lineNumber': 267, 'className': "apptimize.api.ABTApiResultsPost", 'methodName': "onFailure"}))
            apptimize_api_ABTApiResultsPost.PENDING_LOCK.acquire()
            try:
                apptimize_api_ABTApiResultsPost._pendingResults.remove(request)
                count = apptimize_api_ABTApiResultsPost.decrementPendingCount(request._uniqueID)
                if (count > 0):
                    apptimize_api_ABTApiResultsPost.postNextRequestForID(request._uniqueID)
            except BaseException as _g:
                None
                e = haxe_Exception.caught(_g).unwrap()
                apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
                raise haxe_Exception.thrown(e)
            apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
        else:
            def _hx_local_0():
                request._post()
            task = _hx_local_0
            if (apptimize_api_ABTApiResultsPost._postDispatch is not None):
                apptimize_api_ABTApiResultsPost._postDispatch.dispatch(task,apptimize_api_ABTApiResultsPost.incrementFailureDelay())
            else:
                apptimize_util_ABTDispatch.dispatchImmediately(task)
        if (request._failure is not None):
            request._failure(response)
    @staticmethod
    def pushRequest(resultRequest,savePending = None):
        # Enqueue a request. The first post per user id is dispatched
        # immediately; further ones just bump the pending count and are sent
        # sequentially from onSuccess/onFailure.
        # NOTE(review): `savePending` is accepted but never read in this
        # body — confirm whether persistence was intended here.
        if (savePending is None):
            savePending = True
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.acquire()
        try:
            apptimize_api_ABTApiResultsPost._pendingResults.add(resultRequest)
            if (not (resultRequest._uniqueID in apptimize_api_ABTApiResultsPost._pendingMap.h)):
                apptimize_api_ABTApiResultsPost._pendingMap.h[resultRequest._uniqueID] = 1
                def _hx_local_0():
                    resultRequest._post()
                task = _hx_local_0
                if (apptimize_api_ABTApiResultsPost._postDispatch is not None):
                    apptimize_api_ABTApiResultsPost._postDispatch.dispatch(task,0)
                else:
                    apptimize_util_ABTDispatch.dispatchImmediately(task)
            else:
                _this = apptimize_api_ABTApiResultsPost._pendingMap
                key = resultRequest._uniqueID
                value = (apptimize_api_ABTApiResultsPost._pendingMap.h.get(resultRequest._uniqueID,None) + 1)
                _this.h[key] = value
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
    @staticmethod
    def savePendingLogs():
        # Persist the pending queue (as an array) under the results-post key.
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.acquire()
        try:
            apptimize_support_persistence_ABTPersistence.saveObject(apptimize_support_persistence_ABTPersistence.kResultPostsListKey,apptimize_api_ABTApiResultsPost._pendingResults.toArray())
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
    @staticmethod
    def loadPendingLogs():
        # One-shot restore of persisted posts: each saved request is re-queued
        # via pushRequest (without re-saving). Errors are logged, and
        # _loadedPending guards against a second load.
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.acquire()
        try:
            if (apptimize_api_ABTApiResultsPost._loadedPending == False):
                pendingObject = apptimize_support_persistence_ABTPersistence.loadObject(apptimize_support_persistence_ABTPersistence.kResultPostsListKey)
                try:
                    pendingArray = (list() if ((pendingObject is None)) else pendingObject)
                    _g = 0
                    while (_g < len(pendingArray)):
                        p = (pendingArray[_g] if _g >= 0 and _g < len(pendingArray) else None)
                        _g = (_g + 1)
                        apptimize_api_ABTApiResultsPost.pushRequest(p,False)
                except BaseException as _g:
                    None
                    e = haxe_Exception.caught(_g).unwrap()
                    apptimize_ABTLogger.e(("Unable to load pending results posts: " + Std.string(e)),_hx_AnonObject({'fileName': "src/apptimize/api/ABTApiResultsPost.hx", 'lineNumber': 335, 'className': "apptimize.api.ABTApiResultsPost", 'methodName': "loadPendingLogs"}))
                apptimize_api_ABTApiResultsPost._loadedPending = True
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_api_ABTApiResultsPost.PENDING_LOCK.release()
    @staticmethod
    def startDispatching(threadCount):
        # Start the dispatcher's thread pool so queued posts run asynchronously.
        apptimize_api_ABTApiResultsPost._postDispatch.start(threadCount)
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._apiClient = None
        _hx_o._appKey = None
        _hx_o._uniqueID = None
        _hx_o._requestBytes = None
        _hx_o._success = None
        _hx_o._failure = None
        _hx_o._failureCount = None
        _hx_o._url = None
apptimize_api_ABTApiResultsPost._hx_class = apptimize_api_ABTApiResultsPost
_hx_classes["apptimize.api.ABTApiResultsPost"] = apptimize_api_ABTApiResultsPost
class apptimize_api_ABTMetadataPoller:
    """Drives periodic metadata refreshes via a repeating timer."""
    _hx_class_name = "apptimize.api.ABTMetadataPoller"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["_metadataTimer", "_interval", "_backgroundInterval", "_isPolling", "_isThreadingEnabled", "isPolling", "stopPolling", "startPolling", "_stopMetadataTimer", "_startMetadataTimer"]
    _metadataTimer = None
    _interval = None
    _backgroundInterval = None
    _isPolling = None
    _isThreadingEnabled = None
    @staticmethod
    def isPolling():
        # Whether polling is currently flagged as active.
        return apptimize_api_ABTMetadataPoller._isPolling
    @staticmethod
    def stopPolling():
        # Cancel any running timer and mark polling inactive.
        apptimize_api_ABTMetadataPoller._stopMetadataTimer()
        apptimize_api_ABTMetadataPoller._isPolling = False
    @staticmethod
    def startPolling(foreground = None):
        # Refresh configuration, then (re)arm the timer with the foreground or
        # background interval. A non-positive interval leaves the timer off,
        # but polling is still marked active.
        if foreground is None:
            foreground = True
        apptimize_api_ABTMetadataPoller._isThreadingEnabled = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.THREADING_ENABLED_KEY)
        apptimize_api_ABTMetadataPoller._interval = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.METADATA_POLLING_INTERVAL_MS_KEY)
        apptimize_api_ABTMetadataPoller._backgroundInterval = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.METADATA_POLLING_BACKGROUND_INTERVAL_MS_KEY)
        apptimize_api_ABTMetadataPoller._stopMetadataTimer()
        if foreground:
            pollIntervalMs = apptimize_api_ABTMetadataPoller._interval
        else:
            pollIntervalMs = apptimize_api_ABTMetadataPoller._backgroundInterval
        if (pollIntervalMs > 0):
            apptimize_ABTLogger.v((("Metadata update interval set to " + Std.string(pollIntervalMs)) + " milliseconds."),_hx_AnonObject({'fileName': "src/apptimize/api/ABTMetadataPoller.hx", 'lineNumber': 52, 'className': "apptimize.api.ABTMetadataPoller", 'methodName': "startPolling"}))
            apptimize_api_ABTMetadataPoller._startMetadataTimer(pollIntervalMs)
        apptimize_api_ABTMetadataPoller._isPolling = True
    @staticmethod
    def _stopMetadataTimer():
        # Stop and discard the timer when one exists.
        timer = apptimize_api_ABTMetadataPoller._metadataTimer
        if timer is not None:
            timer.stop()
            apptimize_api_ABTMetadataPoller._metadataTimer = None
    @staticmethod
    def _startMetadataTimer(interval):
        # Timers are only armed when threading is enabled in configuration.
        if not apptimize_api_ABTMetadataPoller._isThreadingEnabled:
            return
        if apptimize_api_ABTMetadataPoller._metadataTimer is not None:
            apptimize_api_ABTMetadataPoller._stopMetadataTimer()
        apptimize_api_ABTMetadataPoller._metadataTimer = apptimize_util_ABTTimer(interval)
        apptimize_api_ABTMetadataPoller._metadataTimer.run = apptimize_Apptimize.updateApptimizeMetadataOnce
# Haxe runtime registration: attach the class object to itself and record it
# in the global class table under its dotted Haxe name.
apptimize_api_ABTMetadataPoller._hx_class = apptimize_api_ABTMetadataPoller
_hx_classes["apptimize.api.ABTMetadataPoller"] = apptimize_api_ABTMetadataPoller
class apptimize_api_ABTMetadataProcessor:
    """Converts a downloaded (gzip-compressed) metadata payload into a string."""
    _hx_class_name = "apptimize.api.ABTMetadataProcessor"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["jsonFromMetadataDownload"]
    @staticmethod
    def jsonFromMetadataDownload(_hx_bytes):
        # Gunzip the payload, then read the whole buffer out as a string.
        inflated = apptimize_util_ABTUtilGzip.decompress(_hx_bytes)
        return inflated.getString(0,inflated.length)
apptimize_api_ABTMetadataProcessor._hx_class = apptimize_api_ABTMetadataProcessor
_hx_classes["apptimize.api.ABTMetadataProcessor"] = apptimize_api_ABTMetadataProcessor
class apptimize_api_ABTSecondaryValuesClient:
    """Fetches "secondary values" JSON documents from a list of URLs.

    URLs whose values are already known are reused; the remainder are
    downloaded one at a time, with fetchNext recursing through the HTTP
    callbacks until the work list is empty.
    """
    _hx_class_name = "apptimize.api.ABTSecondaryValuesClient"
    _hx_is_interface = "False"
    __slots__ = ("_fetching",)
    _hx_fields = ["_fetching"]
    _hx_methods = ["fetching", "fetch", "fetchNext", "needNewUrls"]
    def __init__(self):
        self._fetching = False
    def fetching(self):
        # True while a fetch() chain is in flight.
        return self._fetching
    def fetch(self,urls,startingValues,done):
        # Resolve values for every URL in `urls`, reusing `startingValues`
        # where possible. `done(allValues, needFetch)` receives the complete
        # map plus whether any network fetch was actually required.
        _gthis = self
        self._fetching = True
        state = apptimize_api_ABTSecValFetchState(urls,startingValues)
        needFetch = (not state.missingUrls.isEmpty())
        def _hx_local_0(allValues):
            _gthis._fetching = False
            done(allValues,needFetch)
        self.fetchNext(state,_hx_local_0)
    def fetchNext(self,state,done):
        # Pop the next missing URL and GET it. On success the body is parsed
        # as JSON (objects become anon-objects) and stored under the URL; on
        # failure the URL is logged and skipped. Either way the chain
        # continues until missingUrls is empty, then `done` fires.
        _gthis = self
        if state.missingUrls.isEmpty():
            done(state.allValues)
            return
        url = state.missingUrls.pop()
        def _hx_local_0(response):
            _hx_bytes = haxe_io_Bytes.ofData(response.bytes)
            this1 = state.allValues
            v = python_lib_Json.loads(_hx_bytes.getString(0,_hx_bytes.length),**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'object_hook': python_Lib.dictToAnon})))
            this1.h[url] = v
            _gthis.fetchNext(state,done)
        def _hx_local_1(response):
            apptimize_ABTLogger.e(((("GET " + ("null" if url is None else url)) + " failed: ") + Std.string(response.responseCode)),_hx_AnonObject({'fileName': "src/apptimize/api/ABTSecondaryValuesClient.hx", 'lineNumber': 47, 'className': "apptimize.api.ABTSecondaryValuesClient", 'methodName': "fetchNext"}))
            _gthis.fetchNext(state,done)
        apptimize_http_ABTHttpRequest.get(url,None,_hx_local_0,_hx_local_1)
    def needNewUrls(self,old,current):
        # True when `current` contains at least one URL not present in `old`.
        def _hx_local_1():
            def _hx_local_0(item):
                return (python_internal_ArrayImpl.indexOf(old,item,None) < 0)
            return (len(list(filter(_hx_local_0,current))) > 0)
        return _hx_local_1()
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._fetching = None
apptimize_api_ABTSecondaryValuesClient._hx_class = apptimize_api_ABTSecondaryValuesClient
_hx_classes["apptimize.api.ABTSecondaryValuesClient"] = apptimize_api_ABTSecondaryValuesClient
class apptimize_api_ABTSecValFetchState:
    """Work-list state for a secondary-values fetch.

    Splits the requested URLs into values already present in `oldValues`
    (copied into allValues) and URLs still to download (missingUrls).
    """
    _hx_class_name = "apptimize.api.ABTSecValFetchState"
    _hx_is_interface = "False"
    __slots__ = ("missingUrls", "allValues")
    _hx_fields = ["missingUrls", "allValues"]
    def __init__(self,allUrls,oldValues):
        pending = haxe_ds_List()
        known = haxe_ds_StringMap()
        for url in allUrls:
            if (oldValues is not None) and (url in oldValues.h):
                known.h[url] = oldValues.h.get(url,None)
            else:
                pending.add(url)
        self.missingUrls = pending
        self.allValues = known
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.missingUrls = None
        _hx_o.allValues = None
# Haxe runtime registration: attach the class object to itself and record it
# in the global class table under its dotted Haxe name.
apptimize_api_ABTSecValFetchState._hx_class = apptimize_api_ABTSecValFetchState
_hx_classes["apptimize.api.ABTSecValFetchState"] = apptimize_api_ABTSecValFetchState
class apptimize_api_ABTUserGuid:
    """Generates and validates GUID-format user identifiers."""
    _hx_class_name = "apptimize.api.ABTUserGuid"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["_userGuid", "generateUserGuid", "S4", "isValidGuid"]
    _userGuid = None
    @staticmethod
    def generateUserGuid():
        # Build and cache a random GUID-shaped string: five dash-separated
        # groups of 8-4-4-4-12 hex characters, assembled from 4-char chunks.
        s4 = apptimize_api_ABTUserGuid.S4
        sn = HxOverrides.stringOrNull
        groups = [
            (sn(s4()) + sn(s4())),
            sn(s4()),
            sn(s4()),
            sn(s4()),
            ((sn(s4()) + sn(s4())) + sn(s4())),
        ]
        apptimize_api_ABTUserGuid._userGuid = "-".join(groups)
        return apptimize_api_ABTUserGuid._userGuid
    @staticmethod
    def S4(randomFunction = None):
        # Return 4 random hex characters; `randomFunction` defaults to
        # Math.random and must yield a float in [0, 1).
        if randomFunction is None:
            randomFunction = _Math_Math_Impl_.random
        scaled = (randomFunction() * 65536)
        try:
            chunk = int(scaled)
        except BaseException as _g:
            chunk = None
        return StringTools.hex(chunk,4)
    @staticmethod
    def isValidGuid(guid):
        # Match the canonical 8-4-4-4-12 hexadecimal GUID layout.
        pattern = EReg("(^([0-9A-Fa-f]{8}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{12})$)","")
        pattern.matchObj = python_lib_Re.search(pattern.pattern,guid)
        return (pattern.matchObj is not None)
apptimize_api_ABTUserGuid._hx_class = apptimize_api_ABTUserGuid
_hx_classes["apptimize.api.ABTUserGuid"] = apptimize_api_ABTUserGuid
class apptimize_events_ABTEventManager:
    """Static registry of SDK event callbacks with matching dispatch helpers.

    Each ``set*Callback`` stores exactly one callback on the class; each
    ``dispatch*`` invokes the stored callback only when one is registered.
    """
    _hx_class_name = "apptimize.events.ABTEventManager"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["_onParticipationCallback", "_onMetadataUpdatedCallback", "_onParticipatedInExperimentCallback", "_onApptimizeInitializedCallback", "setOnMetadataUpdatedCallback", "dispatchOnMetadataUpdated", "setOnParticipationCallback", "dispatchOnParticipation", "setOnParticipatedInExperimentCallback", "dispatchOnParticipatedInExperiment", "setOnApptimizeInitializedCallback", "dispatchOnApptimizeInitialized"]
    _onParticipationCallback = None
    _onMetadataUpdatedCallback = None
    _onParticipatedInExperimentCallback = None
    _onApptimizeInitializedCallback = None
    @staticmethod
    def setOnMetadataUpdatedCallback(updatedCallback):
        apptimize_events_ABTEventManager._onMetadataUpdatedCallback = updatedCallback
    @staticmethod
    def dispatchOnMetadataUpdated():
        cb = apptimize_events_ABTEventManager._onMetadataUpdatedCallback
        if cb is not None:
            cb()
    @staticmethod
    def setOnParticipationCallback(runCallback):
        apptimize_events_ABTEventManager._onParticipationCallback = runCallback
    @staticmethod
    def dispatchOnParticipation(experimentName,variantName):
        cb = apptimize_events_ABTEventManager._onParticipationCallback
        if cb is not None:
            cb(experimentName,variantName)
    @staticmethod
    def setOnParticipatedInExperimentCallback(callback):
        apptimize_events_ABTEventManager._onParticipatedInExperimentCallback = callback
    @staticmethod
    def dispatchOnParticipatedInExperiment(variantInfo,isFirstParticipation):
        cb = apptimize_events_ABTEventManager._onParticipatedInExperimentCallback
        if cb is not None:
            cb(variantInfo,isFirstParticipation)
        # Always forward to the per-experiment participation callback as well.
        apptimize_events_ABTEventManager.dispatchOnParticipation(variantInfo.getExperimentName(),variantInfo.getVariantName())
    @staticmethod
    def setOnApptimizeInitializedCallback(callback):
        apptimize_events_ABTEventManager._onApptimizeInitializedCallback = callback
    @staticmethod
    def dispatchOnApptimizeInitialized():
        cb = apptimize_events_ABTEventManager._onApptimizeInitializedCallback
        if cb is not None:
            cb()
# Register ABTEventManager in the Haxe runtime class registry.
apptimize_events_ABTEventManager._hx_class = apptimize_events_ABTEventManager
_hx_classes["apptimize.events.ABTEventManager"] = apptimize_events_ABTEventManager
# Haxe-generated enum: tri-state outcome of evaluating a filter.
# NOTE(review): "Enum" is the Haxe-runtime base class (constructor takes
# tag name, index, params), not Python's enum.Enum.
class apptimize_filter_ABTFilterResult(Enum):
    __slots__ = ()
    _hx_class_name = "apptimize.filter.ABTFilterResult"
    _hx_constructs = ["ABTFilterResultUnknown", "ABTFilterResultFalse", "ABTFilterResultTrue"]
# Enum values are pre-built singletons attached as class attributes.
apptimize_filter_ABTFilterResult.ABTFilterResultUnknown = apptimize_filter_ABTFilterResult("ABTFilterResultUnknown", 0, ())
apptimize_filter_ABTFilterResult.ABTFilterResultFalse = apptimize_filter_ABTFilterResult("ABTFilterResultFalse", 1, ())
apptimize_filter_ABTFilterResult.ABTFilterResultTrue = apptimize_filter_ABTFilterResult("ABTFilterResultTrue", 2, ())
apptimize_filter_ABTFilterResult._hx_class = apptimize_filter_ABTFilterResult
_hx_classes["apptimize.filter.ABTFilterResult"] = apptimize_filter_ABTFilterResult
# Haxe-generated enum: where a filter property is read from
# (device properties, user attributes, or prefixed attributes).
class apptimize_filter_ABTFilterPropertySource(Enum):
    __slots__ = ()
    _hx_class_name = "apptimize.filter.ABTFilterPropertySource"
    _hx_constructs = ["ABTFilterPropertySourceDevice", "ABTFilterPropertySourceUser", "ABTFilterPropertySourcePrefixed"]
# Pre-built enum value singletons.
apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourceDevice = apptimize_filter_ABTFilterPropertySource("ABTFilterPropertySourceDevice", 0, ())
apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourceUser = apptimize_filter_ABTFilterPropertySource("ABTFilterPropertySourceUser", 1, ())
apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourcePrefixed = apptimize_filter_ABTFilterPropertySource("ABTFilterPropertySourcePrefixed", 2, ())
apptimize_filter_ABTFilterPropertySource._hx_class = apptimize_filter_ABTFilterPropertySource
_hx_classes["apptimize.filter.ABTFilterPropertySource"] = apptimize_filter_ABTFilterPropertySource
# Haxe-generated enum: the structural kind of a filter; each kind maps to a
# concrete ABTFilter subclass (see apptimize_filter_ABTFilter.classForType).
class apptimize_filter_ABTFilterType(Enum):
    __slots__ = ()
    _hx_class_name = "apptimize.filter.ABTFilterType"
    _hx_constructs = ["ABTFilterTypeUnknown", "ABTFilterTypeSimple", "ABTFilterTypeList", "ABTFilterTypeSet", "ABTFilterTypeCompound", "ABTFilterTypePropertyless", "ABTFilterTypeNamed"]
# Pre-built enum value singletons.
apptimize_filter_ABTFilterType.ABTFilterTypeUnknown = apptimize_filter_ABTFilterType("ABTFilterTypeUnknown", 0, ())
apptimize_filter_ABTFilterType.ABTFilterTypeSimple = apptimize_filter_ABTFilterType("ABTFilterTypeSimple", 1, ())
apptimize_filter_ABTFilterType.ABTFilterTypeList = apptimize_filter_ABTFilterType("ABTFilterTypeList", 2, ())
apptimize_filter_ABTFilterType.ABTFilterTypeSet = apptimize_filter_ABTFilterType("ABTFilterTypeSet", 3, ())
apptimize_filter_ABTFilterType.ABTFilterTypeCompound = apptimize_filter_ABTFilterType("ABTFilterTypeCompound", 4, ())
apptimize_filter_ABTFilterType.ABTFilterTypePropertyless = apptimize_filter_ABTFilterType("ABTFilterTypePropertyless", 5, ())
apptimize_filter_ABTFilterType.ABTFilterTypeNamed = apptimize_filter_ABTFilterType("ABTFilterTypeNamed", 6, ())
apptimize_filter_ABTFilterType._hx_class = apptimize_filter_ABTFilterType
_hx_classes["apptimize.filter.ABTFilterType"] = apptimize_filter_ABTFilterType
# Haxe-generated enum: every comparison/combination operator a filter can
# use. String spellings are mapped to these values by
# apptimize_filter_ABTFilter.operatorFromString.
class apptimize_filter_ABTFilterOperator(Enum):
    __slots__ = ()
    _hx_class_name = "apptimize.filter.ABTFilterOperator"
    _hx_constructs = ["ABTFilterOperatorUnknown", "ABTFilterOperatorEquals", "ABTFilterOperatorNotEquals", "ABTFilterOperatorRegex", "ABTFilterOperatorNotRegex", "ABTFilterOperatorGreaterThan", "ABTFilterOperatorGreaterThanOrEqual", "ABTFilterOperatorLessThan", "ABTFilterOperatorLessThanOrEqual", "ABTFilterOperatorInList", "ABTFilterOperatorNotInList", "ABTFilterOperatorIntersection", "ABTFilterOperatorCompoundOr", "ABTFilterOperatorCompoundAnd", "ABTFilterOperatorCompoundSingleNot", "ABTFilterOperatorCompoundSingleIsNull", "ABTFilterOperatorCompoundSingleIsNotNull", "ABTFilterOperatorPropertyIsNull", "ABTFilterOperatorPropertyIsNotNull", "ABTFilterOperatorPropertyIsRecognized", "ABTFilterOperatorPropertyIsNotRecognized", "ABTFilterOperatorOperatorIsRecognized", "ABTFilterOperatorOperatorIsNotRecognized", "ABTFilterOperatorValueOf"]
# Pre-built enum value singletons, one per construct above.
apptimize_filter_ABTFilterOperator.ABTFilterOperatorUnknown = apptimize_filter_ABTFilterOperator("ABTFilterOperatorUnknown", 0, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorEquals = apptimize_filter_ABTFilterOperator("ABTFilterOperatorEquals", 1, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotEquals = apptimize_filter_ABTFilterOperator("ABTFilterOperatorNotEquals", 2, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorRegex = apptimize_filter_ABTFilterOperator("ABTFilterOperatorRegex", 3, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotRegex = apptimize_filter_ABTFilterOperator("ABTFilterOperatorNotRegex", 4, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorGreaterThan = apptimize_filter_ABTFilterOperator("ABTFilterOperatorGreaterThan", 5, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorGreaterThanOrEqual = apptimize_filter_ABTFilterOperator("ABTFilterOperatorGreaterThanOrEqual", 6, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorLessThan = apptimize_filter_ABTFilterOperator("ABTFilterOperatorLessThan", 7, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorLessThanOrEqual = apptimize_filter_ABTFilterOperator("ABTFilterOperatorLessThanOrEqual", 8, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorInList = apptimize_filter_ABTFilterOperator("ABTFilterOperatorInList", 9, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotInList = apptimize_filter_ABTFilterOperator("ABTFilterOperatorNotInList", 10, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorIntersection = apptimize_filter_ABTFilterOperator("ABTFilterOperatorIntersection", 11, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundOr = apptimize_filter_ABTFilterOperator("ABTFilterOperatorCompoundOr", 12, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundAnd = apptimize_filter_ABTFilterOperator("ABTFilterOperatorCompoundAnd", 13, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundSingleNot = apptimize_filter_ABTFilterOperator("ABTFilterOperatorCompoundSingleNot", 14, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundSingleIsNull = apptimize_filter_ABTFilterOperator("ABTFilterOperatorCompoundSingleIsNull", 15, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundSingleIsNotNull = apptimize_filter_ABTFilterOperator("ABTFilterOperatorCompoundSingleIsNotNull", 16, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsNull = apptimize_filter_ABTFilterOperator("ABTFilterOperatorPropertyIsNull", 17, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsNotNull = apptimize_filter_ABTFilterOperator("ABTFilterOperatorPropertyIsNotNull", 18, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsRecognized = apptimize_filter_ABTFilterOperator("ABTFilterOperatorPropertyIsRecognized", 19, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsNotRecognized = apptimize_filter_ABTFilterOperator("ABTFilterOperatorPropertyIsNotRecognized", 20, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorOperatorIsRecognized = apptimize_filter_ABTFilterOperator("ABTFilterOperatorOperatorIsRecognized", 21, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorOperatorIsNotRecognized = apptimize_filter_ABTFilterOperator("ABTFilterOperatorOperatorIsNotRecognized", 22, ())
apptimize_filter_ABTFilterOperator.ABTFilterOperatorValueOf = apptimize_filter_ABTFilterOperator("ABTFilterOperatorValueOf", 23, ())
apptimize_filter_ABTFilterOperator._hx_class = apptimize_filter_ABTFilterOperator
_hx_classes["apptimize.filter.ABTFilterOperator"] = apptimize_filter_ABTFilterOperator
class apptimize_filter_ABTFilter:
    """Base class for targeting filters parsed from metadata JSON.

    A filter compares a property (read from a device, user, or prefixed
    attribute source) against a value using an operator. Subclasses
    implement the actual matching in filterMatchesEnvironment(); this base
    implementation always reports "unknown".

    NOTE(review): the kABTFilterKey* static string constants are referenced
    here but assigned elsewhere in this generated module (not visible in
    this chunk).
    """
    _hx_class_name = "apptimize.filter.ABTFilter"
    _hx_is_interface = "False"
    __slots__ = ("property", "propertySource", "value", "filterType", "filterOperator", "callServerURLKey")
    _hx_fields = ["property", "propertySource", "value", "filterType", "filterOperator", "callServerURLKey"]
    _hx_methods = ["fromJSON", "isSupportedOperator", "isSupportedProperty", "currentDeviceValue", "hasSupportedProperty", "filterMatchesEnvironment", "getUrlKeys"]
    _hx_statics = ["kABTFilterKeyValue", "kABTFilterKeyType", "kABTFilterKeyProperty", "kABTFilterKeyOperator", "kABTFilterKeyPropertySource", "kABTFilterKeyCallServerInputs", "kABTFilterKeyCallURLKey", "kABTFilterKeyUserAttribute", "kABTFilterKeyPrefixedAttribute", "kABTFilterKeyNamedFilter", "filterFromJSON", "classForType", "filterForTypeFromJSON", "operatorFromString", "typeFromOperator"]
    def __init__(self):
        # All fields start unset; fromJSON() populates them.
        self.callServerURLKey = None
        self.filterOperator = None
        self.filterType = None
        self.value = None
        self.propertySource = None
        self.property = None
    def fromJSON(self,json):
        # Resolve the property and its source, checking the device-property
        # key first, then the user-attribute key, then the prefixed-attribute
        # key. The first key present wins.
        jsonProperty = Reflect.field(json,apptimize_filter_ABTFilter.kABTFilterKeyProperty)
        if (jsonProperty is not None):
            self.property = jsonProperty
            self.propertySource = apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourceDevice
        else:
            jsonProperty = Reflect.field(json,apptimize_filter_ABTFilter.kABTFilterKeyUserAttribute)
            if (jsonProperty is not None):
                self.property = jsonProperty
                self.propertySource = apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourceUser
            else:
                jsonProperty = Reflect.field(json,apptimize_filter_ABTFilter.kABTFilterKeyPrefixedAttribute)
                if (jsonProperty is not None):
                    self.property = jsonProperty
                    self.propertySource = apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourcePrefixed
        # Operator and (derived) type come from the JSON "operator" field.
        self.filterOperator = apptimize_filter_ABTFilter.operatorFromString(Reflect.field(json,apptimize_filter_ABTFilter.kABTFilterKeyOperator))
        self.filterType = apptimize_filter_ABTFilter.filterForTypeFromJSON(json)
        if (self.filterType == apptimize_filter_ABTFilterType.ABTFilterTypeUnknown):
            apptimize_ABTLogger.w("Unknown filter type: setting value without type checking.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 149, 'className': "apptimize.filter.ABTFilter", 'methodName': "fromJSON"}))
        self.value = Reflect.field(json,apptimize_filter_ABTFilter.kABTFilterKeyValue)
        self.callServerURLKey = Reflect.field(json,apptimize_filter_ABTFilter.kABTFilterKeyCallURLKey)
    def isSupportedOperator(self,operatorStr):
        # An operator string is supported iff it maps to a known enum value.
        return (apptimize_filter_ABTFilter.operatorFromString(operatorStr) != apptimize_filter_ABTFilterOperator.ABTFilterOperatorUnknown)
    def isSupportedProperty(self,env,property,source):
        # A property is supported iff the environment can produce a non-null
        # value for it from the given source.
        found = (None != env.valueForProperty(property,source))
        if (not found):
            apptimize_ABTLogger.d((("Property \"" + ("null" if property is None else property)) + "\" not found which is expected by a filter."),_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 166, 'className': "apptimize.filter.ABTFilter", 'methodName': "isSupportedProperty"}))
        return found
    def currentDeviceValue(self,env):
        # Current value of this filter's property in the given environment.
        return env.valueForProperty(self.property,self.propertySource)
    def hasSupportedProperty(self,env):
        return self.isSupportedProperty(env,self.property,self.propertySource)
    def filterMatchesEnvironment(self,env):
        # Base class cannot evaluate anything; subclasses override this.
        apptimize_ABTLogger.e("Unknown filter type. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 181, 'className': "apptimize.filter.ABTFilter", 'methodName': "filterMatchesEnvironment"}))
        return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
    def getUrlKeys(self):
        # Call-server URL keys this filter depends on (zero or one here;
        # compound filters aggregate their children's keys).
        if (self.callServerURLKey is None):
            return []
        return [self.callServerURLKey]
    @staticmethod
    def filterFromJSON(json):
        """Factory: build the concrete filter subclass for the given JSON."""
        filterType = apptimize_filter_ABTFilter.filterForTypeFromJSON(json)
        classType = apptimize_filter_ABTFilter.classForType(filterType)
        if (classType is None):
            apptimize_ABTLogger.e(("Unable to find filter type: " + Std.string(filterType)),_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 110, 'className': "apptimize.filter.ABTFilter", 'methodName': "filterFromJSON"}))
            return None
        abtFilter = classType(*[])
        abtFilter.fromJSON(json)
        return abtFilter
    @staticmethod
    def classForType(filterType):
        # Map a filter type enum value to its implementing class; anything
        # unmatched (including Propertyless here) falls through to the
        # unknown-filter class.
        if (filterType == apptimize_filter_ABTFilterType.ABTFilterTypeSimple):
            return apptimize_filter_ABTSimpleFilter
        if (filterType == apptimize_filter_ABTFilterType.ABTFilterTypeCompound):
            return apptimize_filter_ABTCompoundFilter
        if (filterType == apptimize_filter_ABTFilterType.ABTFilterTypeList):
            return apptimize_filter_ABTListFilter
        if (filterType == apptimize_filter_ABTFilterType.ABTFilterTypeSet):
            return apptimize_filter_ABTSetFilter
        if (filterType == apptimize_filter_ABTFilterType.ABTFilterTypeNamed):
            return apptimize_filter_ABTNamedFilterProxy
        return apptimize_filter_ABTUnknownFilter
    @staticmethod
    def filterForTypeFromJSON(json):
        # The filter type is derived purely from the JSON operator string.
        abtOperator = apptimize_filter_ABTFilter.operatorFromString(Reflect.field(json,apptimize_filter_ABTFilter.kABTFilterKeyOperator))
        _hx_type = apptimize_filter_ABTFilter.typeFromOperator(abtOperator)
        return _hx_type
    @staticmethod
    def operatorFromString(string):
        """Map a JSON operator spelling to its enum value (Unknown if unrecognized)."""
        if ("=" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorEquals
        if ("!=" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotEquals
        if ("regex" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorRegex
        if ("not_regex" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotRegex
        if (">" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorGreaterThan
        if (">=" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorGreaterThanOrEqual
        if ("<" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorLessThan
        if ("<=" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorLessThanOrEqual
        if ("in" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorInList
        if ("not_in" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotInList
        if ("intersection" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorIntersection
        if ("or" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundOr
        if ("and" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundAnd
        if ("not" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundSingleNot
        if ("is_null" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundSingleIsNull
        if ("is_not_null" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundSingleIsNotNull
        if ("is_property_null" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsNull
        if ("is_property_not_null" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsNotNull
        if ("is_recognized_property" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsRecognized
        if ("is_not_recognized_property" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsNotRecognized
        if ("is_recognized_operator" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorOperatorIsRecognized
        if ("is_not_recognized_operator" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorOperatorIsNotRecognized
        if ("value_of" == string):
            return apptimize_filter_ABTFilterOperator.ABTFilterOperatorValueOf
        return apptimize_filter_ABTFilterOperator.ABTFilterOperatorUnknown
    @staticmethod
    def typeFromOperator(abtOperator):
        """Map an operator enum (by index) to the filter type that handles it.

        Index groups: 1-8 simple comparisons, 9-10 list membership, 11 set
        intersection, 12-16 compound combinators, 17-22 property-less
        introspection operators, 23 named-filter reference. Index 0
        (Unknown) and anything else falls through to the error path below.
        """
        tmp = abtOperator.index
        if (tmp == 0):
            pass
        elif (tmp == 1):
            return apptimize_filter_ABTFilterType.ABTFilterTypeSimple
        elif (tmp == 2):
            return apptimize_filter_ABTFilterType.ABTFilterTypeSimple
        elif (tmp == 3):
            return apptimize_filter_ABTFilterType.ABTFilterTypeSimple
        elif (tmp == 4):
            return apptimize_filter_ABTFilterType.ABTFilterTypeSimple
        elif (tmp == 5):
            return apptimize_filter_ABTFilterType.ABTFilterTypeSimple
        elif (tmp == 6):
            return apptimize_filter_ABTFilterType.ABTFilterTypeSimple
        elif (tmp == 7):
            return apptimize_filter_ABTFilterType.ABTFilterTypeSimple
        elif (tmp == 8):
            return apptimize_filter_ABTFilterType.ABTFilterTypeSimple
        elif (tmp == 9):
            return apptimize_filter_ABTFilterType.ABTFilterTypeList
        elif (tmp == 10):
            return apptimize_filter_ABTFilterType.ABTFilterTypeList
        elif (tmp == 11):
            return apptimize_filter_ABTFilterType.ABTFilterTypeSet
        elif (tmp == 12):
            return apptimize_filter_ABTFilterType.ABTFilterTypeCompound
        elif (tmp == 13):
            return apptimize_filter_ABTFilterType.ABTFilterTypeCompound
        elif (tmp == 14):
            return apptimize_filter_ABTFilterType.ABTFilterTypeCompound
        elif (tmp == 15):
            return apptimize_filter_ABTFilterType.ABTFilterTypeCompound
        elif (tmp == 16):
            return apptimize_filter_ABTFilterType.ABTFilterTypeCompound
        elif (tmp == 17):
            return apptimize_filter_ABTFilterType.ABTFilterTypePropertyless
        elif (tmp == 18):
            return apptimize_filter_ABTFilterType.ABTFilterTypePropertyless
        elif (tmp == 19):
            return apptimize_filter_ABTFilterType.ABTFilterTypePropertyless
        elif (tmp == 20):
            return apptimize_filter_ABTFilterType.ABTFilterTypePropertyless
        elif (tmp == 21):
            return apptimize_filter_ABTFilterType.ABTFilterTypePropertyless
        elif (tmp == 22):
            return apptimize_filter_ABTFilterType.ABTFilterTypePropertyless
        elif (tmp == 23):
            return apptimize_filter_ABTFilterType.ABTFilterTypeNamed
        else:
            pass
        apptimize_ABTLogger.e("Unknown filter type. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 298, 'className': "apptimize.filter.ABTFilter", 'methodName': "typeFromOperator"}))
        return apptimize_filter_ABTFilterType.ABTFilterTypeUnknown
    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: create an instance without running __init__.
        _hx_o.property = None
        _hx_o.propertySource = None
        _hx_o.value = None
        _hx_o.filterType = None
        _hx_o.filterOperator = None
        _hx_o.callServerURLKey = None
apptimize_filter_ABTFilter._hx_class = apptimize_filter_ABTFilter
_hx_classes["apptimize.filter.ABTFilter"] = apptimize_filter_ABTFilter
class apptimize_filter_ABTSimpleFilter(apptimize_filter_ABTFilter):
    """Filter comparing one property value against one filter value.

    Dispatches on the runtime type of the current property value (string,
    number, bool) and coerces/validates the filter value accordingly before
    delegating to the ABTFilterUtils evaluators.
    """
    _hx_class_name = "apptimize.filter.ABTSimpleFilter"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["fromJSON", "filterMatchesEnvironment"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilter
    def __init__(self):
        super().__init__()
    def fromJSON(self,json):
        super().fromJSON(json)
        self.value = Reflect.field(json,apptimize_filter_ABTFilter.kABTFilterKeyValue)
    def filterMatchesEnvironment(self,env):
        # Unknown result if the property itself is not available.
        if (not self.hasSupportedProperty(env)):
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        currentValue = self.currentDeviceValue(env)
        filterValue = self.value
        if ((currentValue is None) or ((filterValue is None))):
            apptimize_ABTLogger.w("Filter has null value type. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 329, 'className': "apptimize.filter.ABTSimpleFilter", 'methodName': "filterMatchesEnvironment"}))
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        # --- String property value ---
        if (Type.getClass(currentValue) == str):
            # A numeric filter value against a string property is compared
            # numerically.
            if ((Type.typeof(filterValue) == ValueType.TFloat) or ((Type.typeof(filterValue) == ValueType.TInt))):
                return apptimize_filter_ABTFilterUtils.ABTEvaluateNumber(currentValue,self.filterOperator,filterValue)
            if (Type.getClass(filterValue) != str):
                apptimize_ABTLogger.w("Filter value does not match property type of string. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 340, 'className': "apptimize.filter.ABTSimpleFilter", 'methodName': "filterMatchesEnvironment"}))
                return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
            # Version-like properties get dotted-version comparison semantics.
            if ((((self.property == "apptimize_version") or ((self.property == "system_version"))) or ((self.property == "app_version"))) or ((self.property == "operating_system_version"))):
                return apptimize_filter_ABTFilterUtils.ABTEvaluateVersionString(currentValue,self.filterOperator,filterValue)
            return apptimize_filter_ABTFilterUtils.ABTEvaluateString(currentValue,self.filterOperator,filterValue)
        # --- Numeric property value ---
        if ((Type.typeof(currentValue) == ValueType.TFloat) or ((Type.typeof(currentValue) == ValueType.TInt))):
            if (Type.getClass(filterValue) == str):
                return apptimize_filter_ABTFilterUtils.ABTEvaluateNumber(currentValue,self.filterOperator,filterValue)
            if ((Type.typeof(filterValue) != ValueType.TFloat) and ((Type.typeof(filterValue) != ValueType.TInt))):
                apptimize_ABTLogger.w("Filter value does not match property type of number. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 358, 'className': "apptimize.filter.ABTSimpleFilter", 'methodName': "filterMatchesEnvironment"}))
                return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
            return apptimize_filter_ABTFilterUtils.ABTEvaluateNumber(currentValue,self.filterOperator,filterValue)
        # --- Boolean property value ---
        if (Type.typeof(currentValue) == ValueType.TBool):
            # Accept "true"/"false" strings (case-insensitive) as booleans.
            if (Type.getClass(filterValue) == str):
                if (Reflect.field(filterValue,"toLowerCase")() == "true"):
                    filterValue = True
                elif (Reflect.field(filterValue,"toLowerCase")() == "false"):
                    filterValue = False
            return apptimize_filter_ABTFilterUtils.ABTEvaluateBool(currentValue,self.filterOperator,filterValue)
        # Any other property type cannot be evaluated.
        apptimize_ABTLogger.w("Simple filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 378, 'className': "apptimize.filter.ABTSimpleFilter", 'methodName': "filterMatchesEnvironment"}))
        return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
    @staticmethod
    def _hx_empty_init(_hx_o): pass
apptimize_filter_ABTSimpleFilter._hx_class = apptimize_filter_ABTSimpleFilter
_hx_classes["apptimize.filter.ABTSimpleFilter"] = apptimize_filter_ABTSimpleFilter
class apptimize_filter_ABTCompoundFilter(apptimize_filter_ABTFilter):
    """Filter combining child filters with and/or/not/is-null operators.

    self.value holds the array of child ABTFilter instances parsed from the
    JSON "value" array.
    """
    _hx_class_name = "apptimize.filter.ABTCompoundFilter"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["fromJSON", "filterMatchesEnvironment", "getUrlKeys"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilter
    def __init__(self):
        super().__init__()
    def fromJSON(self,json):
        # Parse each element of the JSON "value" array into a child filter.
        super().fromJSON(json)
        filtersArray = list()
        dynamicArray = Reflect.field(json,"value")
        _g = 0
        while (_g < len(dynamicArray)):
            _hx_filter = (dynamicArray[_g] if _g >= 0 and _g < len(dynamicArray) else None)
            _g = (_g + 1)
            ff = apptimize_filter_ABTFilter.filterFromJSON(_hx_filter)
            filtersArray.append(ff)
        self.value = filtersArray
    def filterMatchesEnvironment(self,env):
        children = self.value
        if (len(children) < 1):
            apptimize_ABTLogger.w((("Compound filter \"" + Std.string(self)) + "\" has an empty compound set. Filter match is unknown."),_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 407, 'className': "apptimize.filter.ABTCompoundFilter", 'methodName': "filterMatchesEnvironment"}))
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        # AND: fold all child results with ABTFilterAnd, starting from True.
        # Note every child is evaluated (no short-circuiting).
        if (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundAnd):
            result = apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            _g = 0
            while (_g < len(children)):
                _hx_filter = (children[_g] if _g >= 0 and _g < len(children) else None)
                _g = (_g + 1)
                currentResult = _hx_filter.filterMatchesEnvironment(env)
                result = apptimize_filter_ABTFilterUtils.ABTFilterAnd(result,currentResult)
            return result
        # OR: fold all child results with ABTFilterOr, starting from False.
        if (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundOr):
            result = apptimize_filter_ABTFilterResult.ABTFilterResultFalse
            _g = 0
            while (_g < len(children)):
                _hx_filter = (children[_g] if _g >= 0 and _g < len(children) else None)
                _g = (_g + 1)
                currentResult = _hx_filter.filterMatchesEnvironment(env)
                result = apptimize_filter_ABTFilterUtils.ABTFilterOr(result,currentResult)
            return result
        # NOT (exactly one child): invert True/False, leave Unknown as-is.
        if ((self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundSingleNot) and ((len(children) == 1))):
            child = (children[0] if 0 < len(children) else None)
            result = child.filterMatchesEnvironment(env)
            if (result == apptimize_filter_ABTFilterResult.ABTFilterResultFalse):
                result = apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            elif (result == apptimize_filter_ABTFilterResult.ABTFilterResultTrue):
                result = apptimize_filter_ABTFilterResult.ABTFilterResultFalse
            return result
        # IS NULL (one child): True iff the child's result is Unknown.
        if ((self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundSingleIsNull) and ((len(children) == 1))):
            child = (children[0] if 0 < len(children) else None)
            result = (apptimize_filter_ABTFilterResult.ABTFilterResultTrue if ((child.filterMatchesEnvironment(env) == apptimize_filter_ABTFilterResult.ABTFilterResultUnknown)) else apptimize_filter_ABTFilterResult.ABTFilterResultFalse)
            return result
        # IS NOT NULL (one child): True iff the child's result is not Unknown.
        if ((self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorCompoundSingleIsNotNull) and ((len(children) == 1))):
            child = (children[0] if 0 < len(children) else None)
            if (child.filterMatchesEnvironment(env) != apptimize_filter_ABTFilterResult.ABTFilterResultUnknown):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        apptimize_ABTLogger.w((("Filter \"" + Std.string(self)) + "\" has an unsupported compound operator or children count. Filter match is unknown."),_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 463, 'className': "apptimize.filter.ABTCompoundFilter", 'methodName': "filterMatchesEnvironment"}))
        return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
    def getUrlKeys(self):
        # Collect each child's URL-key list, then flatten them (generated
        # Haxe flatten using HxOverrides iterators), then prepend this
        # filter's own key if it has one.
        children = self.value
        _g = []
        _g_current = 0
        _g_array = children
        while (_g_current < len(_g_array)):
            x = _g_current
            _g_current = (_g_current + 1)
            x1 = (_g_array[x] if x >= 0 and x < len(_g_array) else None)
            x2 = x1.getUrlKeys()
            _g.append(x2)
        _g1 = []
        e = HxOverrides.iterator(_g)
        while e.hasNext():
            e1 = e.next()
            x = HxOverrides.iterator(e1)
            while x.hasNext():
                x1 = x.next()
                _g1.append(x1)
        childUrls = Lambda.array(_g1)
        if (self.callServerURLKey is None):
            return childUrls
        return ([self.callServerURLKey] + childUrls)
    @staticmethod
    def _hx_empty_init(_hx_o): pass
apptimize_filter_ABTCompoundFilter._hx_class = apptimize_filter_ABTCompoundFilter
_hx_classes["apptimize.filter.ABTCompoundFilter"] = apptimize_filter_ABTCompoundFilter
class apptimize_filter_ABTListFilter(apptimize_filter_ABTFilter):
    """Filter testing membership of a property value in a list (in / not_in).

    self.value is the raw JSON "value" array of candidate values.
    """
    _hx_class_name = "apptimize.filter.ABTListFilter"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["fromJSON", "filterMatchesEnvironment"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilter
    def __init__(self):
        super().__init__()
    def fromJSON(self,json):
        super().fromJSON(json)
        self.value = Reflect.field(json,"value")
    def filterMatchesEnvironment(self,env):
        if (not self.hasSupportedProperty(env)):
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        children = self.value
        currentValue = self.currentDeviceValue(env)
        # NOTE(review): source indentation was lost in extraction; the
        # indexOf branch below is reconstructed as nested inside this
        # None-guard (otherwise the comparison loop that follows would be
        # unreachable) — confirm against the original Haxe source.
        if (currentValue is None):
            apptimize_ABTLogger.w((("Filter \"" + Std.string(self)) + "\" is attempting to match against a null device property."),_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 502, 'className': "apptimize.filter.ABTListFilter", 'methodName': "filterMatchesEnvironment"}))
            # For a null property value, fall back to direct list membership.
            if (python_internal_ArrayImpl.indexOf(children,currentValue,None) > -1):
                if (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorInList):
                    return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
                else:
                    return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
            elif (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorInList):
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
        # Non-null value: compare against each list entry with the
        # string/number equality evaluators.
        inList = False
        _g = 0
        while (_g < len(children)):
            childValue = (children[_g] if _g >= 0 and _g < len(children) else None)
            _g = (_g + 1)
            if (Type.getClass(currentValue) == str):
                inList = (inList or ((apptimize_filter_ABTFilterUtils.ABTEvaluateString(currentValue,apptimize_filter_ABTFilterOperator.ABTFilterOperatorEquals,childValue) == apptimize_filter_ABTFilterResult.ABTFilterResultTrue)))
            else:
                inList = (inList or ((apptimize_filter_ABTFilterUtils.ABTEvaluateNumber(currentValue,apptimize_filter_ABTFilterOperator.ABTFilterOperatorEquals,childValue) == apptimize_filter_ABTFilterResult.ABTFilterResultTrue)))
        if (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotInList):
            if (not inList):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if inList:
            return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
        else:
            return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
    @staticmethod
    def _hx_empty_init(_hx_o): pass
apptimize_filter_ABTListFilter._hx_class = apptimize_filter_ABTListFilter
_hx_classes["apptimize.filter.ABTListFilter"] = apptimize_filter_ABTListFilter
class apptimize_filter_ABTSetFilter(apptimize_filter_ABTFilter):
    """Filter over a set of values.

    Parsing copies the JSON "value" array, but evaluation is not implemented
    here: matching always reports "unknown".
    """
    _hx_class_name = "apptimize.filter.ABTSetFilter"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["fromJSON", "filterMatchesEnvironment"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilter
    def __init__(self):
        super().__init__()
    def fromJSON(self,json):
        # Copy the JSON "value" array into a fresh list.
        super().fromJSON(json)
        rawValues = Reflect.field(json,"value")
        copied = list()
        for entry in rawValues:
            copied.append(entry)
        self.value = copied
    def filterMatchesEnvironment(self,env):
        # Set evaluation is unsupported on this platform.
        return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
    @staticmethod
    def _hx_empty_init(_hx_o): pass
apptimize_filter_ABTSetFilter._hx_class = apptimize_filter_ABTSetFilter
_hx_classes["apptimize.filter.ABTSetFilter"] = apptimize_filter_ABTSetFilter
class apptimize_filter_ABTUnknownFilter(apptimize_filter_ABTFilter):
    """Fallback filter used when the filter type is not recognized.

    Always evaluates to Unknown and logs an error so that unrecognized
    filter definitions fail safe rather than match/mismatch silently.
    Haxe-generated code; structure kept as emitted.
    """
    _hx_class_name = "apptimize.filter.ABTUnknownFilter"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["fromJSON", "filterMatchesEnvironment"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilter

    def __init__(self):
        super().__init__()

    def fromJSON(self, json):
        # Nothing beyond the base-class fields to deserialize.
        super().fromJSON(json)

    def filterMatchesEnvironment(self, env):
        """Log the unknown filter and report Unknown."""
        apptimize_ABTLogger.e("Unknown filter requested. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 599, 'className': "apptimize.filter.ABTUnknownFilter", 'methodName': "filterMatchesEnvironment"}))
        return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown

    @staticmethod
    def _hx_empty_init(_hx_o): pass

# Haxe class registration.
apptimize_filter_ABTUnknownFilter._hx_class = apptimize_filter_ABTUnknownFilter
_hx_classes["apptimize.filter.ABTUnknownFilter"] = apptimize_filter_ABTUnknownFilter
class apptimize_filter_ABTPropertylessFilter(apptimize_filter_ABTFilter):
    """Filter whose operator tests the *existence* or *recognition* of a
    property or operator (is-null / is-not-null / is-recognized variants)
    rather than comparing a property's value.

    The filter's ``value`` is the string name of the property/operator
    under test. Haxe-generated code; structure kept as emitted.
    """
    _hx_class_name = "apptimize.filter.ABTPropertylessFilter"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["fromJSON", "filterMatchesEnvironment"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilter

    def __init__(self):
        super().__init__()

    def fromJSON(self, json):
        """Populate base fields and read the target property/operator name."""
        super().fromJSON(json)
        self.value = Reflect.field(json, apptimize_filter_ABTFilter.kABTFilterKeyValue)

    def filterMatchesEnvironment(self, env):
        """Evaluate against ``env``; returns an ABTFilterResult.

        The value must be a string; anything else yields Unknown.
        """
        filterValue = self.value
        if ((filterValue is None) or ((Type.getClass(filterValue) != str))):
            apptimize_ABTLogger.w("Property-less filter requires a string value. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 619, 'className': "apptimize.filter.ABTPropertylessFilter", 'methodName': "filterMatchesEnvironment"}))
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        filterString = filterValue
        # Null/not-null operators look the property up in the device source.
        if (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsNull):
            currentValue = env.valueForProperty(filterString, apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourceDevice)
            if (currentValue is None):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsNotNull):
            currentValue = env.valueForProperty(filterString, apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourceDevice)
            if (currentValue is not None):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        # Recognition operators ask whether this SDK build knows the name
        # (isSupportedProperty/isSupportedOperator come from the base class).
        if (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsRecognized):
            if (self.isSupportedProperty(env, filterString, apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourceDevice) == True):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorPropertyIsNotRecognized):
            if (self.isSupportedProperty(env, filterString, apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourceDevice) == False):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorOperatorIsRecognized):
            if (self.isSupportedOperator(filterString) == True):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (self.filterOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorOperatorIsNotRecognized):
            if (self.isSupportedOperator(filterString) == False):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        # Any other operator is invalid for this filter type.
        apptimize_ABTLogger.w("Property-less filter attempted with an invalid operator. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 651, 'className': "apptimize.filter.ABTPropertylessFilter", 'methodName': "filterMatchesEnvironment"}))
        return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown

    @staticmethod
    def _hx_empty_init(_hx_o): pass

# Haxe class registration.
apptimize_filter_ABTPropertylessFilter._hx_class = apptimize_filter_ABTPropertylessFilter
_hx_classes["apptimize.filter.ABTPropertylessFilter"] = apptimize_filter_ABTPropertylessFilter
class apptimize_filter_ABTNamedFilterProxy(apptimize_filter_ABTFilter):
    """Filter that delegates evaluation to a named filter resolved by the
    environment (see ABTFilterEnvironment.namedFilterResult).

    Haxe-generated code; structure kept as emitted.
    """
    _hx_class_name = "apptimize.filter.ABTNamedFilterProxy"
    _hx_is_interface = "False"
    __slots__ = ("namedFilter",)
    _hx_fields = ["namedFilter"]
    _hx_methods = ["fromJSON", "filterMatchesEnvironment"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilter

    def __init__(self):
        # namedFilter: name of the referenced filter, set by fromJSON.
        self.namedFilter = None
        super().__init__()

    def fromJSON(self, json):
        """Populate base fields and read the referenced filter's name."""
        super().fromJSON(json)
        self.namedFilter = Reflect.field(json, apptimize_filter_ABTFilter.kABTFilterKeyNamedFilter)

    def filterMatchesEnvironment(self, env):
        # Resolution (including circular-reference handling) lives in env.
        return env.namedFilterResult(self.namedFilter)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.namedFilter = None

# Haxe class registration.
apptimize_filter_ABTNamedFilterProxy._hx_class = apptimize_filter_ABTNamedFilterProxy
_hx_classes["apptimize.filter.ABTNamedFilterProxy"] = apptimize_filter_ABTNamedFilterProxy
class apptimize_filter_ABTFilterUtils:
    """Static helpers for filter evaluation: three-valued (True/False/Unknown)
    boolean combinators and typed comparisons (string, bool, number,
    dotted version string).

    Haxe-generated code; structure kept as emitted.
    """
    _hx_class_name = "apptimize.filter.ABTFilterUtils"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["__meta__", "ABTFilterAnd", "ABTFilterOr", "ABTEvaluateString", "ABTEvaluateBool", "ABTEvaluateNumber", "ABTEvaluateVersionString"]

    @staticmethod
    def ABTFilterAnd(left, right):
        """Three-valued AND of two ABTFilterResult values.

        Explicit cases cover the asymmetric pairs; the fallthrough
        ``return left`` handles the symmetric ones (T&T, F&*, U&U, U&T).
        """
        if ((left == apptimize_filter_ABTFilterResult.ABTFilterResultTrue) and ((right == apptimize_filter_ABTFilterResult.ABTFilterResultUnknown))):
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        if ((left == apptimize_filter_ABTFilterResult.ABTFilterResultTrue) and ((right == apptimize_filter_ABTFilterResult.ABTFilterResultFalse))):
            return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if ((left == apptimize_filter_ABTFilterResult.ABTFilterResultUnknown) and ((right == apptimize_filter_ABTFilterResult.ABTFilterResultFalse))):
            return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        return left

    @staticmethod
    def ABTFilterOr(left, right):
        """Three-valued OR of two ABTFilterResult values.

        Explicit cases cover the asymmetric pairs; the fallthrough
        ``return right`` handles the rest (F|*, U|U, U|T give right).
        """
        if ((left == apptimize_filter_ABTFilterResult.ABTFilterResultUnknown) and ((right == apptimize_filter_ABTFilterResult.ABTFilterResultFalse))):
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        if ((left == apptimize_filter_ABTFilterResult.ABTFilterResultTrue) and ((right == apptimize_filter_ABTFilterResult.ABTFilterResultFalse))):
            return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
        if ((left == apptimize_filter_ABTFilterResult.ABTFilterResultTrue) and ((right == apptimize_filter_ABTFilterResult.ABTFilterResultUnknown))):
            return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
        return right

    @staticmethod
    def ABTEvaluateString(left, abtOperator, right):
        """Case-insensitive string equality/inequality comparison.

        Null operands or an unsupported operator yield Unknown.
        NOTE(review): a non-string operand silently compares as "" —
        two non-string operands therefore compare equal.
        """
        leftString = ""
        rightString = ""
        if ((left is None) or ((right is None))):
            apptimize_ABTLogger.w("String comparison attempted with null string. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 703, 'className': "apptimize.filter.ABTFilterUtils", 'methodName': "ABTEvaluateString"}))
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        if (Type.getClass(left) == str):
            leftString = left
        if (Type.getClass(right) == str):
            rightString = right
        # Comparison is case-insensitive.
        leftString = leftString.lower()
        rightString = rightString.lower()
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorEquals):
            if (leftString == rightString):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotEquals):
            if (leftString != rightString):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        apptimize_ABTLogger.w("String comparison attempted with an invalid operator. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 723, 'className': "apptimize.filter.ABTFilterUtils", 'methodName': "ABTEvaluateString"}))
        return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown

    @staticmethod
    def ABTEvaluateBool(left, abtOperator, right):
        """Boolean equality/inequality; any other operator yields Unknown."""
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorEquals):
            if (left == right):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotEquals):
            if (left != right):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        apptimize_ABTLogger.w("Bool comparison attempted with an invalid operator. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 732, 'className': "apptimize.filter.ABTFilterUtils", 'methodName': "ABTEvaluateBool"}))
        return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown

    @staticmethod
    def ABTEvaluateNumber(left, abtOperator, right):
        """Numeric comparison supporting ==, !=, >, >=, <, <=.

        String operands are parsed with Std.parseFloat; null operands or an
        unsupported operator yield Unknown.
        """
        leftFloat = None
        rightFloat = None
        if ((left is None) or ((right is None))):
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        if (Type.getClass(left) == str):
            leftFloat = Std.parseFloat(left)
        else:
            leftFloat = left
        if (Type.getClass(right) == str):
            rightFloat = Std.parseFloat(right)
        else:
            rightFloat = right
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorEquals):
            if (leftFloat == rightFloat):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotEquals):
            if (leftFloat != rightFloat):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorGreaterThan):
            if (leftFloat > rightFloat):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorGreaterThanOrEqual):
            if (leftFloat >= rightFloat):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorLessThan):
            if (leftFloat < rightFloat):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorLessThanOrEqual):
            if (leftFloat <= rightFloat):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        apptimize_ABTLogger.w("Number comparison attempted with an invalid operator. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 764, 'className': "apptimize.filter.ABTFilterUtils", 'methodName': "ABTEvaluateNumber"}))
        return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown

    @staticmethod
    def ABTEvaluateVersionString(left, abtOperator, right):
        """Compare two dotted version strings with a semver library.

        Null/empty inputs default to "0"; versions are padded to three
        components ("1.2" -> "1.2.0") before parsing. Non-string inputs,
        unparsable versions, or an unsupported operator yield Unknown.
        """
        if ((left is None) or ((left == ""))):
            left = "0"
        if ((right is None) or ((right == ""))):
            right = "0"
        if ((Type.getClass(left) != str) or ((Type.getClass(right) != str))):
            apptimize_ABTLogger.w("Unable to compare versions as values are not strings. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 782, 'className': "apptimize.filter.ABTFilterUtils", 'methodName': "ABTEvaluateVersionString"}))
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        leftString = left
        rightString = right
        leftComponents = leftString.split(".")
        rightComponents = rightString.split(".")
        # Pad each side to at least major.minor.patch with "0" components.
        leftLength = len(leftComponents)
        if (leftLength < 3):
            _g = leftLength
            _g1 = 3
            while (_g < _g1):
                i = _g
                _g = (_g + 1)
                leftComponents.append("0")
        rightLength = len(rightComponents)
        if (rightLength < 3):
            _g = rightLength
            _g1 = 3
            while (_g < _g1):
                i = _g
                _g = (_g + 1)
                rightComponents.append("0")
        leftString = ".".join([python_Boot.toString1(x1, '') for x1 in leftComponents])
        rightString = ".".join([python_Boot.toString1(x1, '') for x1 in rightComponents])
        leftVersion = None
        rightVersion = None
        try:
            leftVersion = thx_semver__Version_Version_Impl_.stringToVersion(leftString)
        except BaseException as _g:
            None
            apptimize_ABTLogger.w((("Unable to validate left (current) version: " + ("null" if leftString is None else leftString)) + ". Filter match is unknown."),_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 822, 'className': "apptimize.filter.ABTFilterUtils", 'methodName': "ABTEvaluateVersionString"}))
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        try:
            rightVersion = thx_semver__Version_Version_Impl_.stringToVersion(rightString)
        except BaseException as _g:
            None
            apptimize_ABTLogger.w((("Unable to validate right (filter) version: " + ("null" if rightString is None else rightString)) + ". Filter match is unknown."),_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 828, 'className': "apptimize.filter.ABTFilterUtils", 'methodName': "ABTEvaluateVersionString"}))
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorEquals):
            if thx_semver__Version_Version_Impl_.equals(leftVersion, rightVersion):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorNotEquals):
            if (not thx_semver__Version_Version_Impl_.equals(leftVersion, rightVersion)):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorGreaterThan):
            if thx_semver__Version_Version_Impl_.greaterThan(leftVersion, rightVersion):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorGreaterThanOrEqual):
            if thx_semver__Version_Version_Impl_.greaterThanOrEqual(leftVersion, rightVersion):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorLessThan):
            if thx_semver__Version_Version_Impl_.lessThan(leftVersion, rightVersion):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        if (abtOperator == apptimize_filter_ABTFilterOperator.ABTFilterOperatorLessThanOrEqual):
            if thx_semver__Version_Version_Impl_.lessThanOrEqual(leftVersion, rightVersion):
                return apptimize_filter_ABTFilterResult.ABTFilterResultTrue
            else:
                return apptimize_filter_ABTFilterResult.ABTFilterResultFalse
        apptimize_ABTLogger.w("Version comparison attempted with an invalid operator. Filter match is unknown.",_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilter.hx", 'lineNumber': 840, 'className': "apptimize.filter.ABTFilterUtils", 'methodName': "ABTEvaluateVersionString"}))
        return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown

# Haxe class registration.
apptimize_filter_ABTFilterUtils._hx_class = apptimize_filter_ABTFilterUtils
_hx_classes["apptimize.filter.ABTFilterUtils"] = apptimize_filter_ABTFilterUtils
class apptimize_filter_ABTFilterEnvParams:
    """Plain value object bundling the per-evaluation inputs handed to
    ABTFilterEnvironment: user/anonymous IDs, custom attributes, the app
    key, and the application/internal property bags. No validation is
    performed; fields are stored exactly as supplied.
    """
    _hx_class_name = "apptimize.filter.ABTFilterEnvParams"
    _hx_is_interface = "False"
    __slots__ = ("userID", "anonID", "customAttrs", "appProps", "appkey", "internalProps")
    _hx_fields = ["userID", "anonID", "customAttrs", "appProps", "appkey", "internalProps"]

    def __init__(self, userId, anonId, customAttrs, appkey, appProps, internalProps):
        # Straight field copies; note the parameter order differs from the
        # slot declaration order (appkey before appProps in the signature).
        self.userID = userId
        self.anonID = anonId
        self.customAttrs = customAttrs
        self.appkey = appkey
        self.appProps = appProps
        self.internalProps = internalProps

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero every slot when bypassing __init__.
        for slot in ("userID", "anonID", "customAttrs", "appProps", "appkey", "internalProps"):
            setattr(_hx_o, slot, None)
# Haxe class registration.
apptimize_filter_ABTFilterEnvParams._hx_class = apptimize_filter_ABTFilterEnvParams
_hx_classes["apptimize.filter.ABTFilterEnvParams"] = apptimize_filter_ABTFilterEnvParams
class apptimize_filter_ABTFilterEnvironment:
    """Runtime context a filter is evaluated against: user/anonymous IDs,
    custom/application/internal property bags, secondary-value URL
    templates and lists, plus named-filter resolution state.

    Haxe-generated code; structure kept as emitted.
    """
    _hx_class_name = "apptimize.filter.ABTFilterEnvironment"
    _hx_is_interface = "False"
    __slots__ = ("userID", "anonID", "customProperties", "applicationProperties", "internalProperties", "sequenceNumber", "appkey", "secondaryValueUrlTemplates", "secondaryValueLists", "namedFilters", "namedFilterCalculator")
    _hx_fields = ["userID", "anonID", "customProperties", "applicationProperties", "internalProperties", "sequenceNumber", "appkey", "secondaryValueUrlTemplates", "secondaryValueLists", "namedFilters", "namedFilterCalculator"]
    _hx_methods = ["getUniqueUserID", "getUserOrAnonID", "valueForProperty", "namedFilterResult", "secondaryUrlForKey", "injectPropsInUrlTemplate"]

    def __init__(self, params, urlTemplates, valueLists, sequenceNumber, namedFilters = None, namedFilterResults = None):
        """Build the environment from an ABTFilterEnvParams bundle plus
        metadata (URL templates, value lists, sequence number) and the
        optional named-filter definitions/previous results."""
        self.namedFilterCalculator = None
        self.namedFilters = None
        self.userID = params.userID
        self.anonID = params.anonID
        self.customProperties = apptimize_support_properties_ABTCustomProperties()
        self.secondaryValueUrlTemplates = urlTemplates
        self.secondaryValueLists = valueLists
        self.applicationProperties = params.appProps
        self.internalProperties = params.internalProps
        self.appkey = params.appkey
        self.sequenceNumber = sequenceNumber
        if (params.customAttrs is not None):
            self.customProperties.setProperties(params.customAttrs)
        # The app key is always exposed as a local custom property.
        self.customProperties.setPropertyForNamespace("app_key", params.appkey, apptimize_support_properties_CustomPropertyNamespace.ApptimizeLocal)
        self.namedFilters = namedFilters
        if (self.namedFilters is None):
            self.namedFilters = list()
        # Index named filters by name for the resolver.
        filterMap = haxe_ds_StringMap()
        _g = 0
        _g1 = self.namedFilters
        while (_g < len(_g1)):
            _hx_filter = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            filterMap.h[_hx_filter.filterName] = _hx_filter
        self.namedFilterCalculator = apptimize_filter_ABTNamedFilterCalculator(filterMap, namedFilterResults, list())

    def getUniqueUserID(self):
        """Return "<appkey>_<user-or-anon-id>" (nulls rendered as "null")."""
        return ((HxOverrides.stringOrNull(self.appkey) + "_") + HxOverrides.stringOrNull(self.getUserOrAnonID()))

    def getUserOrAnonID(self):
        # Prefer the explicit user ID; fall back to the anonymous ID.
        if (self.userID is not None):
            return self.userID
        return self.anonID

    def valueForProperty(self, property, source):
        """Look up ``property`` in the bag selected by ``source``.

        Device -> application properties; User -> custom user attributes;
        Prefixed -> custom properties, falling back to internal properties.
        Unknown sources return None.
        """
        if (source == apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourceDevice):
            return self.applicationProperties.valueForProperty(property)
        if (source == apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourceUser):
            return self.customProperties.valueForNamespacedProperty(property, apptimize_support_properties_CustomPropertyNamespace.UserAttribute)
        if (source == apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourcePrefixed):
            value = self.customProperties.valueForProperty(property)
            if (value is None):
                value = self.internalProperties.valueForProperty(property)
            return value
        return None

    def namedFilterResult(self, name):
        # Delegates to the calculator, which handles caching and cycles.
        return self.namedFilterCalculator.resolve(name, self)

    def secondaryUrlForKey(self, key):
        """Build the secondary-value URL for ``key``.

        Returns None for a missing/empty key, an unknown template, or a
        template whose property placeholders cannot all be filled in.
        Appends metadataSequenceNumber as a query parameter ("?" or "&"
        depending on whether the URL already has a query string).
        """
        if ((key is None) or ((key == ""))):
            return None
        urlTemplate = self.secondaryValueUrlTemplates.h.get(key, None)
        if (urlTemplate is None):
            apptimize_ABTLogger.e(("unknown secondary url key " + ("null" if key is None else key)),_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilterEnvironment.hx", 'lineNumber': 152, 'className': "apptimize.filter.ABTFilterEnvironment", 'methodName': "secondaryUrlForKey"}))
            return None
        url = self.injectPropsInUrlTemplate(urlTemplate)
        if (url is None):
            return None
        paramSep = "&"
        startIndex = None
        # Generated indexOf expansion; startIndex is always None here.
        if (((url.find("?") if ((startIndex is None)) else HxString.indexOfImpl(url, "?", startIndex))) == -1):
            paramSep = "?"
        fullUrl = (((("" + ("null" if url is None else url)) + ("null" if paramSep is None else paramSep)) + "metadataSequenceNumber=") + Std.string(self.sequenceNumber))
        return fullUrl

    def injectPropsInUrlTemplate(self, template):
        """Replace each "{name}" placeholder in ``template`` with the
        URL-encoded value of that property (Prefixed source). Returns None
        if any placeholder has no value."""
        _gthis = self
        regex = EReg("\\{([^}]+)}", "g")
        missingSome = False
        def _hx_local_0(subregex):
            nonlocal missingSome
            key = subregex.matchObj.group(1)
            value = _gthis.valueForProperty(key, apptimize_filter_ABTFilterPropertySource.ABTFilterPropertySourcePrefixed)
            if (value is None):
                # Marker only aids logging/debugging; the whole result is
                # discarded below once missingSome is set.
                missingSome = True
                return (("<MISSING:" + ("null" if key is None else key)) + "}>")
            else:
                return python_lib_urllib_Parse.quote(value, "")
        mapped = regex.map(template, _hx_local_0)
        if missingSome:
            return None
        return mapped

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.userID = None
        _hx_o.anonID = None
        _hx_o.customProperties = None
        _hx_o.applicationProperties = None
        _hx_o.internalProperties = None
        _hx_o.sequenceNumber = None
        _hx_o.appkey = None
        _hx_o.secondaryValueUrlTemplates = None
        _hx_o.secondaryValueLists = None
        _hx_o.namedFilters = None
        _hx_o.namedFilterCalculator = None

# Haxe class registration.
apptimize_filter_ABTFilterEnvironment._hx_class = apptimize_filter_ABTFilterEnvironment
_hx_classes["apptimize.filter.ABTFilterEnvironment"] = apptimize_filter_ABTFilterEnvironment
class apptimize_filter_ABTNamedFilterCalculator:
    """Resolves named filters by name, recording each result and guarding
    against circular references via an explicit evaluation stack.

    Haxe-generated code; structure kept as emitted.
    """
    _hx_class_name = "apptimize.filter.ABTNamedFilterCalculator"
    _hx_is_interface = "False"
    __slots__ = ("allNamedFilters", "evaluations", "evaluationStack")
    _hx_fields = ["allNamedFilters", "evaluations", "evaluationStack"]
    _hx_methods = ["resolve"]

    def __init__(self, namedFilters, evaluations, stack):
        # namedFilters: StringMap name -> filter; evaluations: results map;
        # stack: names currently being evaluated (cycle detection).
        self.allNamedFilters = namedFilters
        self.evaluations = evaluations
        self.evaluationStack = stack

    def resolve(self, name, env):
        """Evaluate the named filter ``name`` against ``env``.

        Returns Unknown (with an error log) when the name is missing or a
        circular reference is detected; otherwise records and returns the
        filter's match result.
        """
        _hx_filter = self.allNamedFilters.h.get(name, None)
        if (_hx_filter is None):
            apptimize_ABTLogger.e(("Failed to resolve filter " + ("null" if name is None else name)),_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilterEnvironment.hx", 'lineNumber': 217, 'className': "apptimize.filter.ABTNamedFilterCalculator", 'methodName': "resolve"}))
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        if (name in self.evaluationStack):
            apptimize_ABTLogger.e(("Found a circular reference on resolving " + ("null" if name is None else name)),_hx_AnonObject({'fileName': "src/apptimize/filter/ABTFilterEnvironment.hx", 'lineNumber': 228, 'className': "apptimize.filter.ABTNamedFilterCalculator", 'methodName': "resolve"}))
            return apptimize_filter_ABTFilterResult.ABTFilterResultUnknown
        # Push, evaluate, record, pop.
        _this = self.evaluationStack
        _this.append(name)
        result = _hx_filter.performFilterMatchingWithEnvironment(env).result
        self.evaluations.h[name] = result
        python_internal_ArrayImpl.remove(self.evaluationStack, name)
        return result

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.allNamedFilters = None
        _hx_o.evaluations = None
        _hx_o.evaluationStack = None

# Haxe class registration.
apptimize_filter_ABTNamedFilterCalculator._hx_class = apptimize_filter_ABTNamedFilterCalculator
_hx_classes["apptimize.filter.ABTNamedFilterCalculator"] = apptimize_filter_ABTNamedFilterCalculator
class apptimize_filter_ABTFilterableObject:
    """Base class for objects gated by filters.

    Holds a primary filter list (filters, merged with filters2) plus
    overriding inclusive filters that can force a True result, and
    aggregates their evaluation with three-valued AND.
    Haxe-generated code; structure kept as emitted.
    """
    _hx_class_name = "apptimize.filter.ABTFilterableObject"
    _hx_is_interface = "False"
    __slots__ = ("filters", "filters2", "overridingInclusiveFilters", "matchingFilters", "nonMatchingFilters")
    _hx_fields = ["filters", "filters2", "overridingInclusiveFilters", "matchingFilters", "nonMatchingFilters"]
    _hx_methods = ["initialize", "performFilterMatchingWithEnvironment", "computeNewOverrideState", "fromJSON", "jsonToFilterArray", "getUrlKeys", "getUrlKeyProviders", "getUrlKeysFrom", "getLocalUrlKeys", "asUrlProviders"]

    def __init__(self):
        self.nonMatchingFilters = None
        self.matchingFilters = None
        self.overridingInclusiveFilters = None
        self.filters2 = None
        self.filters = None
        self.initialize()

    def initialize(self):
        # Reset every filter list to empty.
        self.filters = list()
        self.filters2 = list()
        self.matchingFilters = list()
        self.nonMatchingFilters = list()
        self.overridingInclusiveFilters = list()

    def performFilterMatchingWithEnvironment(self, env):
        """Evaluate all filters against ``env``.

        Returns an anonymous object {result, overriding}: result is the
        three-valued AND over ``filters`` (starting from True), except that
        any overriding inclusive filter that matches forces result=True and
        overriding=True.
        """
        ret = _hx_AnonObject({'result': apptimize_filter_ABTFilterResult.ABTFilterResultTrue, 'overriding': False})
        _g = 0
        _g1 = self.filters
        while (_g < len(_g1)):
            _hx_filter = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            match = _hx_filter.filterMatchesEnvironment(env)
            ret.result = apptimize_filter_ABTFilterUtils.ABTFilterAnd(ret.result, match)
        _g = 0
        _g1 = self.overridingInclusiveFilters
        while (_g < len(_g1)):
            _hx_filter = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            match = _hx_filter.filterMatchesEnvironment(env)
            if (match == apptimize_filter_ABTFilterResult.ABTFilterResultTrue):
                ret.result = apptimize_filter_ABTFilterResult.ABTFilterResultTrue
                ret.overriding = True
        return ret

    def computeNewOverrideState(self, wasOverrideOnly, matchResult):
        """Compute the next override-only flag from the previous flag and a
        performFilterMatchingWithEnvironment result: an overriding match
        clears it; a non-True result sets it; otherwise unchanged."""
        if wasOverrideOnly:
            if matchResult.overriding:
                return False
        elif (matchResult.result != apptimize_filter_ABTFilterResult.ABTFilterResultTrue):
            return True
        return wasOverrideOnly

    def fromJSON(self, json):
        """Deserialize the three filter arrays; filters2 entries are merged
        into filters after parsing."""
        self.filters = self.jsonToFilterArray(Reflect.field(json, "filters"))
        self.filters2 = self.jsonToFilterArray(Reflect.field(json, "filters2"))
        self.overridingInclusiveFilters = self.jsonToFilterArray(Reflect.field(json, "overridingInclusiveFilters"))
        if (len(self.filters2) > 0):
            self.filters = (self.filters + self.filters2)

    def jsonToFilterArray(self, input):
        # Map each raw JSON entry through the filter factory; None -> [].
        filterArray = (list() if ((input is None)) else input)
        def _hx_local_1():
            def _hx_local_0(_hx_filter):
                return apptimize_filter_ABTFilter.filterFromJSON(_hx_filter)
            return list(map(_hx_local_0, filterArray))
        return _hx_local_1()

    def getUrlKeys(self):
        # Local keys plus keys contributed by provider objects.
        return (self.getLocalUrlKeys() + self.getUrlKeysFrom(self.getUrlKeyProviders()))

    def getUrlKeyProviders(self):
        # Subclasses override to contribute additional key providers.
        return []

    def getUrlKeysFrom(self, items):
        """Collect and flatten getUrlKeys() from every item in ``items``."""
        _g = []
        _g_current = 0
        _g_array = items
        while (_g_current < len(_g_array)):
            x = _g_current
            _g_current = (_g_current + 1)
            x1 = (_g_array[x] if x >= 0 and x < len(_g_array) else None)
            x2 = x1.getUrlKeys()
            _g.append(x2)
        # Flatten the list of lists (generated iterator expansion).
        _g1 = []
        e = HxOverrides.iterator(_g)
        while e.hasNext():
            e1 = e.next()
            x = HxOverrides.iterator(e1)
            while x.hasNext():
                x1 = x.next()
                _g1.append(x1)
        return Lambda.array(_g1)

    def getLocalUrlKeys(self):
        return (self.getUrlKeysFrom(self.filters) + self.getUrlKeysFrom(self.overridingInclusiveFilters))

    def asUrlProviders(self, items):
        # Identity map; exists to satisfy the Haxe-typed provider interface.
        def _hx_local_1():
            def _hx_local_0(item):
                return item
            return list(map(_hx_local_0, items))
        return _hx_local_1()

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.filters = None
        _hx_o.filters2 = None
        _hx_o.overridingInclusiveFilters = None
        _hx_o.matchingFilters = None
        _hx_o.nonMatchingFilters = None

# Haxe class registration.
apptimize_filter_ABTFilterableObject._hx_class = apptimize_filter_ABTFilterableObject
_hx_classes["apptimize.filter.ABTFilterableObject"] = apptimize_filter_ABTFilterableObject
class apptimize_filter_ABTNamedFilter(apptimize_filter_ABTFilterableObject):
    """A filterable object with a name and stickiness flags controlling
    whether True/False/null results persist across evaluations.

    Haxe-generated code; the kABTNamedFilterKey* statics are assigned
    elsewhere in this file.
    """
    _hx_class_name = "apptimize.filter.ABTNamedFilter"
    _hx_is_interface = "False"
    __slots__ = ("filterName", "trueIsSticky", "falseIsSticky", "nullIsSticky")
    _hx_fields = ["filterName", "trueIsSticky", "falseIsSticky", "nullIsSticky"]
    _hx_methods = ["fromJSON"]
    _hx_statics = ["kABTNamedFilterKeyFilterName", "kABTNamedFilterKeyTrueIsSticky", "kABTNamedFilterKeyFalseIsSticky", "kABTNamedFilterKeyNullIsSticky"]
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilterableObject

    def __init__(self, source):
        # Deserializes immediately from the supplied JSON source.
        self.nullIsSticky = None
        self.falseIsSticky = None
        self.trueIsSticky = None
        self.filterName = None
        super().__init__()
        self.fromJSON(source)

    def fromJSON(self, json):
        """Read the base filter arrays plus name and stickiness flags."""
        super().fromJSON(json)
        self.filterName = Reflect.field(json, apptimize_filter_ABTNamedFilter.kABTNamedFilterKeyFilterName)
        self.trueIsSticky = Reflect.field(json, apptimize_filter_ABTNamedFilter.kABTNamedFilterKeyTrueIsSticky)
        self.falseIsSticky = Reflect.field(json, apptimize_filter_ABTNamedFilter.kABTNamedFilterKeyFalseIsSticky)
        self.nullIsSticky = Reflect.field(json, apptimize_filter_ABTNamedFilter.kABTNamedFilterKeyNullIsSticky)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.filterName = None
        _hx_o.trueIsSticky = None
        _hx_o.falseIsSticky = None
        _hx_o.nullIsSticky = None

# Haxe class registration.
apptimize_filter_ABTNamedFilter._hx_class = apptimize_filter_ABTNamedFilter
_hx_classes["apptimize.filter.ABTNamedFilter"] = apptimize_filter_ABTNamedFilter
class apptimize_http_ABTHttpResponse:
    """Value object describing an HTTP response: raw bytes, decoded text,
    status code, and the ETag header (when present)."""
    _hx_class_name = "apptimize.http.ABTHttpResponse"
    _hx_is_interface = "False"
    __slots__ = ("bytes", "text", "responseCode", "etag")
    _hx_fields = ["bytes", "text", "responseCode", "etag"]
    _hx_methods = ["isSuccess"]

    def __init__(self):
        # Defaults: no body, empty text, -1 sentinel status, no ETag.
        self.bytes = None
        self.text = ""
        self.responseCode = -1
        self.etag = None

    def isSuccess(self):
        """A response counts as success for 200 (OK) and 304 (Not Modified)."""
        return self.responseCode in (200, 304)

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero every slot when bypassing __init__.
        for slot in ("bytes", "text", "responseCode", "etag"):
            setattr(_hx_o, slot, None)
# Haxe class registration.
apptimize_http_ABTHttpResponse._hx_class = apptimize_http_ABTHttpResponse
_hx_classes["apptimize.http.ABTHttpResponse"] = apptimize_http_ABTHttpResponse
class apptimize_http_ABTHttpRequestInterface:
    """Haxe interface marker declaring the get/post contract implemented by
    platform-specific request classes (e.g. the Python implementation in
    this file). Carries no behavior of its own."""
    _hx_class_name = "apptimize.http.ABTHttpRequestInterface"
    _hx_is_interface = "True"
    __slots__ = ()
    _hx_methods = ["get", "post"]

# Haxe class registration.
apptimize_http_ABTHttpRequestInterface._hx_class = apptimize_http_ABTHttpRequestInterface
_hx_classes["apptimize.http.ABTHttpRequestInterface"] = apptimize_http_ABTHttpRequestInterface
class apptimize_http_ABTHttpRequest:
    """Static facade over the platform HTTP implementation.

    Every call obtains a fresh request object via getRequestInterface()
    and forwards to its get/post, reporting results through the supplied
    success/failure callbacks.
    """
    _hx_class_name = "apptimize.http.ABTHttpRequest"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["getRequestInterface", "getRealRequestInterface", "get", "post"]

    @staticmethod
    def getRequestInterface():
        # Indirection point; currently always the real implementation.
        return apptimize_http_ABTHttpRequest.getRealRequestInterface()

    @staticmethod
    def getRealRequestInterface():
        # Python target: requests-backed implementation defined below.
        return apptimize_http_ABTHttpRequestPython()

    @staticmethod
    def get(url, requestHeaders, successCallback, failureCallback):
        apptimize_http_ABTHttpRequest.getRequestInterface().get(url, requestHeaders, successCallback, failureCallback)

    @staticmethod
    def post(url, data, appKey, successCallback, failureCallback):
        apptimize_http_ABTHttpRequest.getRequestInterface().post(url, data, appKey, successCallback, failureCallback)

# Haxe class registration.
apptimize_http_ABTHttpRequest._hx_class = apptimize_http_ABTHttpRequest
_hx_classes["apptimize.http.ABTHttpRequest"] = apptimize_http_ABTHttpRequest
class apptimize_http_ABTHttpRequestPython:
_hx_class_name = "apptimize.http.ABTHttpRequestPython"
_hx_is_interface = "False"
__slots__ = ("_successCallback", "_failureCallback", "_timeSent")
_hx_fields = ["_successCallback", "_failureCallback", "_timeSent"]
_hx_methods = ["get", "processGetResponse", "post", "processPostResponse"]
_hx_interfaces = [apptimize_http_ABTHttpRequestInterface]
def __init__(self):
    # Per-request state; populated by get()/post() before each send.
    self._timeSent = None
    self._failureCallback = None
    self._successCallback = None
def get(self,url,requestHeaders,successCallback,failureCallback):
isThreaded = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.THREADING_ENABLED_KEY)
self._successCallback = successCallback
self._failureCallback = failureCallback
headers = apptimize_util_ABTUtilDictionary.stringMapToNativeDictionary(requestHeaders)
self._timeSent = Date.now()
if isThreaded:
s = apptimize_native_python_Session()
Reflect.field(s.headers,"update")(headers)
apptimize_http_ABTNetworkLogger.logRequest("GET",url,s.headers)
session = apptimize_native_python_FuturesSession(**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'session': s})))
session.get(url,**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'background_callback': self.processGetResponse})))
else:
try:
apptimize_http_ABTNetworkLogger.logRequest("GET",url,headers)
resp = apptimize_native_python_Requests.get(url,**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'headers': headers})))
self.processGetResponse(None,resp)
except BaseException as _g:
None
exception = haxe_Exception.caught(_g).unwrap()
response = apptimize_http_ABTHttpResponse()
response.text = (("Failed to download with GET request with exception: \"" + Std.string(exception)) + "\".")
self._failureCallback(response)
def processGetResponse(self,session,response):
httpResponse = apptimize_http_ABTHttpResponse()
httpResponse.bytes = Reflect.field(response,"content")
httpResponse.responseCode = Reflect.field(response,"status_code")
responseHeaders = Reflect.field(response,"headers")
httpResponse.etag = responseHeaders.get("etag")
duration = ((Date.now().date.timestamp() * 1000) - ((self._timeSent.date.timestamp() * 1000)))
apptimize_http_ABTNetworkLogger.logResponse(Reflect.field(response,"url"),duration,Reflect.field(response,"headers"),Reflect.field(response,"text"))
if httpResponse.isSuccess():
self._successCallback(httpResponse)
else:
self._failureCallback(httpResponse)
def post(self,url,data,appKey,successCallback,failureCallback):
isThreaded = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.THREADING_ENABLED_KEY)
headers = dict()
headers["X-App-Key"] = appKey
headers["Content-Type"] = "application/json; charset=UTF-8"
self._successCallback = successCallback
self._failureCallback = failureCallback
self._timeSent = Date.now()
if isThreaded:
s = apptimize_native_python_Session()
Reflect.field(s.headers,"update")(headers)
if apptimize_http_ABTNetworkLogger.shouldLog():
apptimize_http_ABTNetworkLogger.logRequest("POST",url,s.headers,data.toString())
session = apptimize_native_python_FuturesSession(**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'session': s})))
session.post(url,data.b,None,**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'background_callback': self.processPostResponse})))
else:
try:
apptimize_http_ABTNetworkLogger.logRequest("POST",url,headers)
resp = apptimize_native_python_Requests.post(url,data.b,None,**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'headers': headers})))
self.processPostResponse(None,resp)
except BaseException as _g:
None
exception = haxe_Exception.caught(_g).unwrap()
response = apptimize_http_ABTHttpResponse()
response.text = (((("Failed to POST to url \"" + ("null" if url is None else url)) + "\" with exception: ") + Std.string(exception)) + ".")
self._failureCallback(response)
def processPostResponse(self,session,response):
httpResponse = apptimize_http_ABTHttpResponse()
httpResponse.text = Reflect.field(response,"text")
httpResponse.responseCode = Reflect.field(response,"status_code")
duration = ((Date.now().date.timestamp() * 1000) - ((self._timeSent.date.timestamp() * 1000)))
apptimize_http_ABTNetworkLogger.logResponse(Reflect.field(response,"url"),duration,Reflect.field(response,"headers"),Reflect.field(response,"text"))
if httpResponse.isSuccess():
self._successCallback(httpResponse)
else:
self._failureCallback(httpResponse)
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o._successCallback = None
_hx_o._failureCallback = None
_hx_o._timeSent = None
apptimize_http_ABTHttpRequestPython._hx_class = apptimize_http_ABTHttpRequestPython
_hx_classes["apptimize.http.ABTHttpRequestPython"] = apptimize_http_ABTHttpRequestPython
class apptimize_http_ABTNetworkLogger:
    # Verbose-level logging of HTTP traffic via apptimize_ABTLogger.
    _hx_class_name = "apptimize.http.ABTNetworkLogger"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["shouldLog", "logRequest", "logResponse"]
    @staticmethod
    def shouldLog():
        # Network logging is only emitted at VERBOSE log level.
        return (apptimize_ABTLogger.logLevel == apptimize_ABTLogger.LOG_LEVEL_VERBOSE)
    @staticmethod
    def logRequest(_hx_type,url,headers,body = None):
        # _hx_type is the HTTP verb ("GET"/"POST"); body is optional.
        apptimize_ABTLogger.v(((((((("URL Request " + ("null" if _hx_type is None else _hx_type)) + " ") + ("null" if url is None else url)) + "\nHeaders: ") + Std.string(headers)) + "\nBody: ") + ("null" if body is None else body)),_hx_AnonObject({'fileName': "src/apptimize/http/ABTNetworkLogger.hx", 'lineNumber': 9, 'className': "apptimize.http.ABTNetworkLogger", 'methodName': "logRequest"}))
    @staticmethod
    def logResponse(url,duration,headers,body = None):
        # duration is the round-trip time in milliseconds.
        apptimize_ABTLogger.v(((((((("URL Response " + ("null" if url is None else url)) + "\nHeaders: ") + Std.string(headers)) + "\nBody: ") + ("null" if body is None else body)) + "\nResponse Time(ms): ") + Std.string(duration)),_hx_AnonObject({'fileName': "src/apptimize/http/ABTNetworkLogger.hx", 'lineNumber': 13, 'className': "apptimize.http.ABTNetworkLogger", 'methodName': "logResponse"}))
apptimize_http_ABTNetworkLogger._hx_class = apptimize_http_ABTNetworkLogger
_hx_classes["apptimize.http.ABTNetworkLogger"] = apptimize_http_ABTNetworkLogger
class apptimize_models_ABTAlteration(apptimize_filter_ABTFilterableObject):
    # Base class for a single alteration (value change or code block)
    # belonging to a variant. Subclasses parse the type-specific JSON fields;
    # filter matching is inherited from ABTFilterableObject.
    _hx_class_name = "apptimize.models.ABTAlteration"
    _hx_is_interface = "False"
    __slots__ = ("_variant", "_key")
    _hx_fields = ["_variant", "_key"]
    _hx_methods = ["fromJSON", "selectAlterationsIntoArray", "getKey", "getVariant"]
    _hx_statics = ["alterationFromJSON", "classForType"]
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilterableObject
    def __init__(self):
        self._key = None
        self._variant = None
        super().__init__()
    def fromJSON(self,json):
        super().fromJSON(json)
    def selectAlterationsIntoArray(self,env,target,overrideOnly):
        # Append self to `target` when this alteration's filters match `env`.
        # When overrideOnly is set, only an overriding match may select it.
        match = self.performFilterMatchingWithEnvironment(env)
        canSelect = (match.overriding or (not overrideOnly))
        selected = (canSelect and ((match.result == apptimize_filter_ABTFilterResult.ABTFilterResultTrue)))
        if selected:
            apptimize_ABTLogger.v((((((("Selecting alteration \"" + HxOverrides.stringOrNull(self.getKey())) + "\" for variant \"") + HxOverrides.stringOrNull(self.getVariant().getVariantName())) + "\" for user ") + HxOverrides.stringOrNull(env.getUserOrAnonID())) + "."),_hx_AnonObject({'fileName': "src/apptimize/models/ABTAlteration.hx", 'lineNumber': 50, 'className': "apptimize.models.ABTAlteration", 'methodName': "selectAlterationsIntoArray"}))
            target.append(self)
    def getKey(self):
        return self._key
    def getVariant(self):
        return self._variant
    @staticmethod
    def alterationFromJSON(json,variant):
        # Factory: instantiate the subclass matching json.type, initialize,
        # parse the JSON, and attach the owning variant.
        classType = apptimize_models_ABTAlteration.classForType(Reflect.field(json,"type"))
        instance = classType(*[])
        instance.initialize()
        instance.fromJSON(json)
        instance._variant = variant
        return instance
    @staticmethod
    def classForType(_hx_type):
        # "block" alterations invoke code blocks; anything else is a value.
        if ("block" == _hx_type):
            return apptimize_models_ABTBlockAlteration
        return apptimize_models_ABTValueAlteration
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._variant = None
        _hx_o._key = None
apptimize_models_ABTAlteration._hx_class = apptimize_models_ABTAlteration
_hx_classes["apptimize.models.ABTAlteration"] = apptimize_models_ABTAlteration
class apptimize_models_ABTBlockAlteration(apptimize_models_ABTAlteration):
    # Alteration that names a code block (method) to run for the variant.
    _hx_class_name = "apptimize.models.ABTBlockAlteration"
    _hx_is_interface = "False"
    __slots__ = ("methodName",)
    _hx_fields = ["methodName"]
    _hx_methods = ["fromJSON"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_ABTAlteration
    def __init__(self):
        self.methodName = None
        super().__init__()
    def fromJSON(self,json):
        # Reads "key" and "methodName" on top of the base-class fields.
        super().fromJSON(json)
        self._key = Reflect.field(json,"key")
        self.methodName = Reflect.field(json,"methodName")
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.methodName = None
apptimize_models_ABTBlockAlteration._hx_class = apptimize_models_ABTBlockAlteration
_hx_classes["apptimize.models.ABTBlockAlteration"] = apptimize_models_ABTBlockAlteration
class apptimize_models_ABTValueAlteration(apptimize_models_ABTAlteration):
    # Alteration that substitutes a dynamic-variable value. _type/_nestedType
    # describe the value's declared type; _useDefaultValue signals that the
    # app's default should be served instead of _value.
    _hx_class_name = "apptimize.models.ABTValueAlteration"
    _hx_is_interface = "False"
    __slots__ = ("_value", "_type", "_nestedType", "_useDefaultValue")
    _hx_fields = ["_value", "_type", "_nestedType", "_useDefaultValue"]
    _hx_methods = ["fromJSON", "useDefaultValue", "getValue", "getType", "getNestedType"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_ABTAlteration
    def __init__(self):
        self._useDefaultValue = None
        self._nestedType = None
        self._type = None
        self._value = None
        super().__init__()
    def fromJSON(self,json):
        super().fromJSON(json)
        self._key = Reflect.field(json,"key")
        self._value = Reflect.field(json,"value")
        self._type = Reflect.field(json,"type")
        self._nestedType = Reflect.field(json,"nestedType")
        self._useDefaultValue = Reflect.field(json,"useDefaultValue")
        # Dictionary values arrive as anonymous objects; convert once at
        # parse time to a native Python dict.
        if ((self._value is not None) and ((self._type == "dictionary"))):
            self._value = apptimize_util_ABTUtilDictionary.dynamicToNativeDictionary(self._value)
    def useDefaultValue(self):
        return self._useDefaultValue
    def getValue(self):
        return self._value
    def getType(self):
        return self._type
    def getNestedType(self):
        return self._nestedType
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._value = None
        _hx_o._type = None
        _hx_o._nestedType = None
        _hx_o._useDefaultValue = None
apptimize_models_ABTValueAlteration._hx_class = apptimize_models_ABTValueAlteration
_hx_classes["apptimize.models.ABTValueAlteration"] = apptimize_models_ABTValueAlteration
class apptimize_models_ABTJSONObject:
    # Empty marker base class for JSON-serializable model objects
    # (e.g. ABTResultEntry derives from it).
    _hx_class_name = "apptimize.models.ABTJSONObject"
    _hx_is_interface = "False"
    __slots__ = ()
apptimize_models_ABTJSONObject._hx_class = apptimize_models_ABTJSONObject
_hx_classes["apptimize.models.ABTJSONObject"] = apptimize_models_ABTJSONObject
class apptimize_models_ABTMetadata:
    # Parsed Apptimize metadata payload: seed groups, hotfixes and named
    # filters decoded from the metadata JSON, plus the HTTP etag and any
    # downloaded secondary values. Also owns an LRU cache for alteration
    # selection and a memo of named-filter evaluations.
    _hx_class_name = "apptimize.models.ABTMetadata"
    _hx_is_interface = "False"
    __slots__ = ("_jsonData", "_seedGroups", "_hotfixes", "_alterationCache", "_namedFilters", "_namedFiltersEvaluations", "_etag", "_secondaryValues")
    _hx_fields = ["_jsonData", "_seedGroups", "_hotfixes", "_alterationCache", "_namedFilters", "_namedFiltersEvaluations", "_etag", "_secondaryValues"]
    _hx_methods = ["copyPersistentValues", "makeEnvironment", "getGroupsUrlTemplate", "_load_data", "reprocessJson", "uncachedSelectAlterationsIntoArray", "selectAlterationsIntoArray", "extractNeededSecondaryUrls", "extractSdkParameters", "metadataProcessed", "getVariantsCyclesPhases", "getMetaData", "getSequenceNumber", "getCheckinUrls", "getAppKey", "getEtag", "setEtag", "setSecondaryValues", "getSecondaryValues", "getDisabledVersions", "hxSerialize", "serializeV1", "hxUnserialize"]
    _hx_statics = ["loadFromString"]
    def __init__(self):
        self._secondaryValues = None
        self._etag = None
        self._namedFilters = None
        self._hotfixes = None
        self._seedGroups = None
        self._jsonData = None
        # Cache size comes from the ALTERATION_CACHE_SIZE config property.
        self._alterationCache = apptimize_util_ABTLRUCache(apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.ALTERATION_CACHE_SIZE_KEY))
        self._namedFiltersEvaluations = haxe_ds_StringMap()
    def copyPersistentValues(self,source):
        # Carry over memoized named-filter evaluations from a previous
        # metadata instance (e.g. across a metadata refresh).
        if (source is None):
            return
        if (source._namedFiltersEvaluations is not None):
            self._namedFiltersEvaluations = source._namedFiltersEvaluations.copy()
    def makeEnvironment(self,params,sdkParams):
        # Build an ABTFilterEnvironment for filter evaluation. URL templates
        # default to the groups/cohorts API URLs but may be overridden by
        # sdkParams.callServerUrls.
        valueLists = self._secondaryValues
        _g = haxe_ds_StringMap()
        value = self.getGroupsUrlTemplate("lpilot_targeting_id")
        _g.h["groupsApiUrl"] = value
        value = self.getGroupsUrlTemplate("m_cohort_id")
        _g.h["cohortsApiUrl"] = value
        templates = _g
        if ((sdkParams is not None) and ((sdkParams.callServerUrls is not None))):
            templates = sdkParams.callServerUrls
        return apptimize_filter_ABTFilterEnvironment(params,templates,valueLists,self.getSequenceNumber(),self._namedFilters,self._namedFiltersEvaluations)
    def getGroupsUrlTemplate(self,param):
        # Compose the pilot-groups API URL template; {lapp_key} and {param}
        # placeholders are substituted later.
        groupsBaseUrl = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY)
        groupsFullUrl = (((("null" if groupsBaseUrl is None else groupsBaseUrl) + "/api/pilot-groups/?appKey={lapp_key}&pilotTargetingId={") + ("null" if param is None else param)) + "}")
        return groupsFullUrl
    def _load_data(self,content):
        # Parse the raw metadata JSON string and rebuild all derived state.
        # The alteration cache is reset since its entries keyed off old data.
        self._alterationCache = apptimize_util_ABTLRUCache(apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.ALTERATION_CACHE_SIZE_KEY))
        self._jsonData = python_lib_Json.loads(content,**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'object_hook': python_Lib.dictToAnon})))
        self.reprocessJson()
    def reprocessJson(self):
        # Rebuild _seedGroups, _hotfixes and _namedFilters model objects
        # from the already-parsed _jsonData.
        if (self._jsonData is None):
            raise haxe_Exception.thrown("Unable to process metadata")
        self._seedGroups = list()
        if (self._jsonData.seedGroups is not None):
            _g = 0
            _g1 = self._jsonData.seedGroups
            while (_g < len(_g1)):
                sg = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                _this = self._seedGroups
                x = apptimize_models_ABTSeedGroup(sg)
                _this.append(x)
        self._hotfixes = list()
        if (self._jsonData.hotfixes is not None):
            _g = 0
            _g1 = self._jsonData.hotfixes
            while (_g < len(_g1)):
                hf = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                _this = self._hotfixes
                x = apptimize_models_ABTHotfixVariant(hf)
                _this.append(x)
        apptimize_ABTLogger.i(("JSONNamedFilters: " + Std.string(self._jsonData.namedFilters)),_hx_AnonObject({'fileName': "src/apptimize/models/ABTMetadata.hx", 'lineNumber': 197, 'className': "apptimize.models.ABTMetadata", 'methodName': "reprocessJson"}))
        self._namedFilters = list()
        if (self._jsonData.namedFilters is not None):
            _g = 0
            _g1 = self._jsonData.namedFilters
            while (_g < len(_g1)):
                item = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                _this = self._namedFilters
                x = apptimize_filter_ABTNamedFilter(item)
                _this.append(x)
    def uncachedSelectAlterationsIntoArray(self,env,checkCache = None):
        # Walk every seed group and hotfix, collecting matching alterations.
        # Clears the named-filter evaluation memo afterwards.
        if (checkCache is None):
            checkCache = True
        alterations = list()
        _g = 0
        _g1 = self._seedGroups
        while (_g < len(_g1)):
            seedgroup = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            seedgroup.selectAlterationsIntoArray(env,alterations,False)
        _g = 0
        _g1 = self._hotfixes
        while (_g < len(_g1)):
            hotfix = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            hotfix.selectAlterationsIntoArray(env,alterations,False)
        self._namedFiltersEvaluations.h.clear()
        return alterations
    def selectAlterationsIntoArray(self,env):
        # Public entry point: select alterations and record a
        # "metadata processed" result-log entry.
        alterations = self.uncachedSelectAlterationsIntoArray(env,False)
        self.metadataProcessed(env,alterations)
        return alterations
    def extractNeededSecondaryUrls(self,env):
        # Collect the distinct secondary-download URLs referenced by all seed
        # groups and hotfixes; _hx_map deduplicates by key.
        keys = list()
        _g = 0
        _g1 = self._seedGroups
        while (_g < len(_g1)):
            seedgroup = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            keys = (keys + seedgroup.getUrlKeys())
        _g = 0
        _g1 = self._hotfixes
        while (_g < len(_g1)):
            hotfix = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            keys = (keys + hotfix.getUrlKeys())
        ret = list()
        _hx_map = haxe_ds_StringMap()
        _g = 0
        while (_g < len(keys)):
            key = (keys[_g] if _g >= 0 and _g < len(keys) else None)
            _g = (_g + 1)
            if (not (key in _hx_map.h)):
                value = env.secondaryUrlForKey(key)
                _hx_map.h[key] = value
                if (value is not None):
                    ret.append(value)
        return ret
    def extractSdkParameters(self,env):
        # Merge SDK parameters from every seed group that matches `env`.
        ret = apptimize_models_ABTSdkParameters(None)
        if (self._seedGroups is not None):
            _g = 0
            _g1 = self._seedGroups
            while (_g < len(_g1)):
                seedGroup = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                seedGroup.applySdkParameters(ret,env)
        return ret
    def metadataProcessed(self,env,alterations):
        # Record a result-log entry describing which variants/cycles/phases
        # were selected for this environment.
        metadataProcessedEntry = apptimize_models_results_ABTResultEntryMetadataProcessed(env,self.getSequenceNumber(),self.getVariantsCyclesPhases(alterations))
        apptimize_ABTDataStore.sharedInstance().addResultLogEntry(env,metadataProcessedEntry)
    def getVariantsCyclesPhases(self,alterations):
        # Map variantID -> {v, c, p} for all non-hotfix variants referenced by
        # the selected alterations (each variant recorded once).
        variantsCyclesPhases = haxe_ds_IntMap()
        _g = 0
        while (_g < len(alterations)):
            alteration = (alterations[_g] if _g >= 0 and _g < len(alterations) else None)
            _g = (_g + 1)
            variant = alteration.getVariant()
            phase = variant.getPhase()
            # NOTE(review): variantStickyString is computed but unused here.
            variantStickyString = ((("v" + Std.string(variant.getVariantID())) + "_") + Std.string(variant.getCycle()))
            if ((Type.getClass(variant) != apptimize_models_ABTHotfixVariant) and (not (variant.getVariantID() in variantsCyclesPhases.h))):
                k = variant.getVariantID()
                v = _hx_AnonObject({'v': variant.getVariantID(), 'c': variant.getCycle(), 'p': phase})
                variantsCyclesPhases.set(k,v)
        return Lambda.array(variantsCyclesPhases)
    def getMetaData(self):
        return self._jsonData
    def getSequenceNumber(self):
        return self._jsonData.sequenceNumber
    def getCheckinUrls(self):
        return self._jsonData.checkinUrls
    def getAppKey(self):
        return self._jsonData.appKey
    def getEtag(self):
        return self._etag
    def setEtag(self,etag):
        self._etag = etag
    def setSecondaryValues(self,values):
        self._secondaryValues = values
    def getSecondaryValues(self):
        return self._secondaryValues
    def getDisabledVersions(self):
        # Return SDK versions disabled by the server; "disableAllVersions"
        # adds the current SDK version to the list.
        disableAll = Reflect.field(self._jsonData,"disableAllVersions")
        disableVersions = Reflect.field(self._jsonData,"disabledCrossPlatformVersions")
        ret = list()
        if disableAll:
            x = apptimize_Apptimize.getApptimizeSDKVersion()
            ret.append(x)
        if (disableVersions is not None):
            ret = (ret + self._jsonData.disabledCrossPlatformVersions)
        return ret
    def hxSerialize(self,s):
        # Serialized layout: V1 fields (re-printed JSON, etag) followed by
        # the secondary values appended in a later format revision.
        self.serializeV1(s)
        s.serialize(self._secondaryValues)
    def serializeV1(self,s):
        s.serialize(haxe_format_JsonPrinter.print(self._jsonData,None,None))
        s.serialize(self._etag)
    def hxUnserialize(self,u):
        # Mirror of hxSerialize; tolerates V1 payloads that lack the trailing
        # secondary-values field.
        self._load_data(u.unserialize())
        self._etag = u.unserialize()
        try:
            self._secondaryValues = u.unserialize()
        except BaseException as _g:
            None
            self._secondaryValues = None
        # The evaluation memo is transient and always rebuilt fresh.
        self._namedFiltersEvaluations = haxe_ds_StringMap()
    @staticmethod
    def loadFromString(content):
        # Convenience factory: parse a raw metadata JSON string.
        m = apptimize_models_ABTMetadata()
        m._load_data(content)
        return m
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._jsonData = None
        _hx_o._seedGroups = None
        _hx_o._hotfixes = None
        _hx_o._alterationCache = None
        _hx_o._namedFilters = None
        _hx_o._namedFiltersEvaluations = None
        _hx_o._etag = None
        _hx_o._secondaryValues = None
apptimize_models_ABTMetadata._hx_class = apptimize_models_ABTMetadata
_hx_classes["apptimize.models.ABTMetadata"] = apptimize_models_ABTMetadata
class apptimize_models_ABTRange:
    # A numeric interval parsed from a two-element JSON array [start, end].
    # Missing elements are left as None rather than raising.
    _hx_class_name = "apptimize.models.ABTRange"
    _hx_is_interface = "False"
    __slots__ = ("start", "end")
    _hx_fields = ["start", "end"]
    _hx_methods = ["fromJSON"]
    def __init__(self,json):
        # Pre-seed both slots so a partially-parsed instance never raises
        # AttributeError on access.
        self.end = None
        self.start = None
        self.fromJSON(json)
    def fromJSON(self,json):
        # Read [start, end] out of a JSON array; absent entries become None.
        count = len(json)
        self.start = json[0] if count > 0 else None
        self.end = json[1] if count > 1 else None
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.start = None
        _hx_o.end = None
apptimize_models_ABTRange._hx_class = apptimize_models_ABTRange
_hx_classes["apptimize.models.ABTRange"] = apptimize_models_ABTRange
class apptimize_models_ABTRangeGroup(apptimize_filter_ABTFilterableObject):
    # A bucket of seed ranges with the variants and nested seed groups that
    # apply to users whose seed falls inside one of the ranges. May also
    # carry sdkParameters overrides.
    _hx_class_name = "apptimize.models.ABTRangeGroup"
    _hx_is_interface = "False"
    __slots__ = ("ranges", "sdkParameters", "seedGroups", "variants")
    _hx_fields = ["ranges", "sdkParameters", "seedGroups", "variants"]
    _hx_methods = ["fromJSON", "selectAlterationsIntoArray", "isSelectedBySeed", "getUrlKeyProviders", "applySdkParameters"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilterableObject
    def __init__(self,group):
        self.variants = None
        self.seedGroups = None
        self.sdkParameters = None
        self.ranges = None
        super().__init__()
        self.fromJSON(group)
    def fromJSON(self,group):
        # Build ranges, nested seed groups, variants and (optionally)
        # sdkParameters from the range-group JSON.
        super().fromJSON(group)
        rangeGroup = group
        self.ranges = list()
        if (rangeGroup.ranges is not None):
            _g = 0
            _g1 = rangeGroup.ranges
            while (_g < len(_g1)):
                range = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                _this = self.ranges
                x = apptimize_models_ABTRange(range)
                _this.append(x)
        self.seedGroups = list()
        if (rangeGroup.seedGroups is not None):
            _g = 0
            _g1 = rangeGroup.seedGroups
            while (_g < len(_g1)):
                sg = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                _this = self.seedGroups
                x = apptimize_models_ABTSeedGroup(sg)
                _this.append(x)
        self.variants = list()
        if (rangeGroup.variants is not None):
            _g = 0
            _g1 = rangeGroup.variants
            while (_g < len(_g1)):
                variant = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                _this = self.variants
                x = apptimize_models_ABTVariant(variant)
                _this.append(x)
        if python_Boot.hasField(group,"sdkParameters"):
            self.sdkParameters = apptimize_models_ABTSdkParameters(Reflect.field(group,"sdkParameters"))
    def selectAlterationsIntoArray(self,env,target,overrideOnly):
        # Delegate alteration selection to contained variants and nested
        # seed groups when this group's own filters match.
        match = self.performFilterMatchingWithEnvironment(env)
        if (match.result != apptimize_filter_ABTFilterResult.ABTFilterResultTrue):
            return
        newOverrideOnly = self.computeNewOverrideState(overrideOnly,match)
        _g = 0
        _g1 = self.variants
        while (_g < len(_g1)):
            variant = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            variant.selectAlterationsIntoArray(env,target,newOverrideOnly)
        _g = 0
        _g1 = self.seedGroups
        while (_g < len(_g1)):
            seedgroup = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            seedgroup.selectAlterationsIntoArray(env,target,newOverrideOnly)
    def isSelectedBySeed(self,seed):
        # True when `seed` falls in any half-open range [start, end).
        _g = 0
        _g1 = self.ranges
        while (_g < len(_g1)):
            range = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            if ((seed >= range.start) and ((seed < range.end))):
                return True
        return False
    def getUrlKeyProviders(self):
        return (self.asUrlProviders(self.seedGroups) + self.asUrlProviders(self.variants))
    def applySdkParameters(self,to,env):
        # Apply this group's own overrides first, then recurse into nested
        # seed groups (which may themselves filter on env).
        if (self.sdkParameters is not None):
            to.mergeOther(self.sdkParameters)
        _g = 0
        _g1 = self.seedGroups
        while (_g < len(_g1)):
            seedGroup = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            seedGroup.applySdkParameters(to,env)
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.ranges = None
        _hx_o.sdkParameters = None
        _hx_o.seedGroups = None
        _hx_o.variants = None
apptimize_models_ABTRangeGroup._hx_class = apptimize_models_ABTRangeGroup
_hx_classes["apptimize.models.ABTRangeGroup"] = apptimize_models_ABTRangeGroup
class apptimize_models_ABTSdkParameters:
    # Server-tunable SDK parameters: minimum result-post frequency and
    # call-server URL template overrides. Fields stay None when unset.
    _hx_class_name = "apptimize.models.ABTSdkParameters"
    _hx_is_interface = "False"
    __slots__ = ("minPostFrequencyMs", "callServerUrls")
    _hx_fields = ["minPostFrequencyMs", "callServerUrls"]
    _hx_methods = ["fromJSON", "mergeOther"]
    def __init__(self,json):
        self.minPostFrequencyMs = None
        self.callServerUrls = None
        if json is not None:
            self.fromJSON(json)
    def fromJSON(self,json):
        # Populate fields from a decoded JSON object; absent keys keep None.
        freq = Reflect.field(json,"minPostFrequencyMs")
        if freq is not None:
            self.minPostFrequencyMs = freq
        urls = Reflect.field(json,"callServerUrls")
        if urls is not None:
            self.callServerUrls = apptimize_util_ABTUtilDictionary.dynamicObjectToStringMap(urls)
    def mergeOther(self,other):
        # Overlay every non-None field of `other` onto self (`other` wins).
        if other.minPostFrequencyMs is not None:
            self.minPostFrequencyMs = other.minPostFrequencyMs
        if other.callServerUrls is not None:
            self.callServerUrls = other.callServerUrls
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.minPostFrequencyMs = None
        _hx_o.callServerUrls = None
apptimize_models_ABTSdkParameters._hx_class = apptimize_models_ABTSdkParameters
_hx_classes["apptimize.models.ABTSdkParameters"] = apptimize_models_ABTSdkParameters
class apptimize_models_ABTSeed:
    # One seed component: either derives material from the user ID ("guid"
    # type) or contributes a fixed server-supplied value.
    _hx_class_name = "apptimize.models.ABTSeed"
    _hx_is_interface = "False"
    __slots__ = ("type", "value")
    _hx_fields = ["type", "value"]
    _hx_methods = ["fromDef", "computedSeedMaterial"]
    def __init__(self,seed):
        self.value = None
        self.type = None
        self.fromDef(seed)
    def fromDef(self,seed):
        self.type = seed.type
        self.value = Reflect.field(seed,"value")
    def computedSeedMaterial(self,userID):
        # Return SHA-1 bytes of the seed material. For "guid" seeds a
        # well-formed 32-hex-digit user ID is hex-decoded first; otherwise
        # the raw user ID string is hashed as a fallback.
        if (self.type == "guid"):
            base = haxe_io_Bytes.ofString("0123456789abcdef")
            resultStr = StringTools.replace(userID,"-","").lower()
            try:
                if (len(resultStr) == 32):
                    return apptimize_util_ABTHash.Sha1(haxe_crypto_BaseCode(base).decodeBytes(haxe_io_Bytes.ofString(resultStr)))
            except BaseException as _g:
                None
                # Only string exceptions (from BaseCode decode) are treated
                # as "not a GUID"; anything else propagates.
                if Std.isOfType(haxe_Exception.caught(_g).unwrap(),str):
                    apptimize_ABTLogger.w("Invalid GUID supplied - treating as string user ID.",_hx_AnonObject({'fileName': "src/apptimize/models/ABTSeed.hx", 'lineNumber': 33, 'className': "apptimize.models.ABTSeed", 'methodName': "computedSeedMaterial"}))
                else:
                    raise _g
            return apptimize_util_ABTHash.Sha1(haxe_io_Bytes.ofString(userID))
        elif (self.value is not None):
            return apptimize_util_ABTHash.Sha1(haxe_io_Bytes.ofString(self.value))
        else:
            apptimize_ABTLogger.e((("Unable to calculate seed for supplied user ID of type: " + HxOverrides.stringOrNull(self.type)) + "."),_hx_AnonObject({'fileName': "src/apptimize/models/ABTSeed.hx", 'lineNumber': 41, 'className': "apptimize.models.ABTSeed", 'methodName': "computedSeedMaterial"}))
            return None
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.type = None
        _hx_o.value = None
apptimize_models_ABTSeed._hx_class = apptimize_models_ABTSeed
_hx_classes["apptimize.models.ABTSeed"] = apptimize_models_ABTSeed
class apptimize_models_ABTSeedGroup(apptimize_filter_ABTFilterableObject):
    # A set of seeds plus the range groups they bucket users into. The user's
    # 30-bit seed is derived from SHA-1 over the concatenated seed material.
    _hx_class_name = "apptimize.models.ABTSeedGroup"
    _hx_is_interface = "False"
    __slots__ = ("rangeGroups", "seeds")
    _hx_fields = ["rangeGroups", "seeds"]
    _hx_methods = ["fromJSON", "computedSeedMaterial", "seed", "selectAlterationsIntoArray", "getUrlKeyProviders", "applySdkParameters"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_filter_ABTFilterableObject
    def __init__(self,group):
        self.seeds = None
        self.rangeGroups = None
        super().__init__()
        self.fromJSON(group)
    def fromJSON(self,group):
        super().fromJSON(group)
        jsonSeedGroup = group
        self.rangeGroups = list()
        if (jsonSeedGroup.rangeGroups is not None):
            _g = 0
            _g1 = jsonSeedGroup.rangeGroups
            while (_g < len(_g1)):
                range = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                _this = self.rangeGroups
                x = apptimize_models_ABTRangeGroup(range)
                _this.append(x)
        self.seeds = list()
        if (jsonSeedGroup.seeds is not None):
            _g = 0
            _g1 = jsonSeedGroup.seeds
            while (_g < len(_g1)):
                seed = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                _this = self.seeds
                x = apptimize_models_ABTSeed(seed)
                _this.append(x)
    def computedSeedMaterial(self,userID):
        # Concatenate every seed's material bytes and hash the result.
        buffer = haxe_io_BytesBuffer()
        _g = 0
        _g1 = self.seeds
        while (_g < len(_g1)):
            seed = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            b = seed.computedSeedMaterial(userID)
            _hx_len = b.length
            # Inlined haxe Bytes bounds check before the buffer append.
            if ((_hx_len < 0) or ((_hx_len > b.length))):
                raise haxe_Exception.thrown(haxe_io_Error.OutsideBounds)
            buffer.b.extend(b.b[0:_hx_len])
        return apptimize_util_ABTHash.Sha1(buffer.getBytes())
    def seed(self,userID):
        # Derive a 30-bit unsigned seed from the last 4 bytes of the hashed
        # seed material (big-endian, masked to 0x3FFFFFFF).
        data = self.computedSeedMaterial(userID)
        _hx_len = data.length
        if (_hx_len < 4):
            apptimize_ABTLogger.e((("User ID length too short for seed: " + Std.string(_hx_len)) + "."),_hx_AnonObject({'fileName': "src/apptimize/models/ABTSeedGroup.hx", 'lineNumber': 64, 'className': "apptimize.models.ABTSeedGroup", 'methodName': "seed"}))
            return 0
        l = (_hx_len - 4)
        seed = (((data.b[(l + 3)] | ((data.b[(l + 2)] << 8))) | ((data.b[(l + 1)] << 16))) | ((data.b[l] << 24)))
        seed = (seed & 1073741823)
        return seed
    def selectAlterationsIntoArray(self,env,target,overrideOnly):
        # Non-selected range groups are still traversed with overrideOnly
        # forced True so overriding alterations can apply everywhere.
        match = self.performFilterMatchingWithEnvironment(env)
        if (match.result != apptimize_filter_ABTFilterResult.ABTFilterResultTrue):
            return
        newOverrideOnly = self.computeNewOverrideState(overrideOnly,match)
        seed = self.seed(env.getUserOrAnonID())
        apptimize_ABTLogger.v((((("Calculated seed for user " + HxOverrides.stringOrNull(env.getUserOrAnonID())) + ": ") + Std.string(seed)) + "."),_hx_AnonObject({'fileName': "src/apptimize/models/ABTSeedGroup.hx", 'lineNumber': 89, 'className': "apptimize.models.ABTSeedGroup", 'methodName': "selectAlterationsIntoArray"}))
        _g = 0
        _g1 = self.rangeGroups
        while (_g < len(_g1)):
            rangeGroup = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            if rangeGroup.isSelectedBySeed(seed):
                rangeGroup.selectAlterationsIntoArray(env,target,newOverrideOnly)
            else:
                rangeGroup.selectAlterationsIntoArray(env,target,True)
    def getUrlKeyProviders(self):
        return self.asUrlProviders(self.rangeGroups)
    def applySdkParameters(self,to,env):
        # Only range groups selected by the user's seed contribute parameters.
        match = self.performFilterMatchingWithEnvironment(env)
        if (match.result == apptimize_filter_ABTFilterResult.ABTFilterResultTrue):
            seed = self.seed(env.getUserOrAnonID())
            _g = 0
            _g1 = self.rangeGroups
            while (_g < len(_g1)):
                rangeGroup = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                if rangeGroup.isSelectedBySeed(seed):
                    rangeGroup.applySdkParameters(to,env)
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.rangeGroups = None
        _hx_o.seeds = None
apptimize_models_ABTSeedGroup._hx_class = apptimize_models_ABTSeedGroup
_hx_classes["apptimize.models.ABTSeedGroup"] = apptimize_models_ABTSeedGroup
class apptimize_models_ABTVariant(apptimize_filter_ABTFilterableObject):
_hx_class_name = "apptimize.models.ABTVariant"
_hx_is_interface = "False"
__slots__ = ("alterations", "alterations2", "codeBlockName", "experimentId", "experimentName", "experimentType", "startTime", "variantId", "variantName", "cycle", "phase")
_hx_fields = ["alterations", "alterations2", "codeBlockName", "experimentId", "experimentName", "experimentType", "startTime", "variantId", "variantName", "cycle", "phase"]
_hx_methods = ["fromJSON", "selectAlterationsIntoArray", "getVariantID", "getVariantName", "getExperimentID", "getExperimentName", "getExperimentType", "getCodeBlockName", "getPhase", "getCycle", "getUrlKeyProviders"]
_hx_statics = []
_hx_interfaces = []
_hx_super = apptimize_filter_ABTFilterableObject
def __init__(self,variant):
self.phase = None
self.cycle = None
self.variantName = None
self.variantId = None
self.startTime = None
self.experimentType = None
self.experimentName = None
self.experimentId = None
self.codeBlockName = None
self.alterations2 = None
self.alterations = None
super().__init__()
self.fromJSON(variant)
def fromJSON(self,obj):
super().fromJSON(obj)
variant = obj
self.alterations = list()
self.alterations2 = list()
_g = 0
_g1 = variant.alterations
while (_g < len(_g1)):
alteration = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
_g = (_g + 1)
_this = self.alterations
x = apptimize_models_ABTAlteration.alterationFromJSON(alteration,self)
_this.append(x)
self.codeBlockName = variant.codeBlockName
if (Type.getClass(self) != apptimize_models_ABTHotfixVariant):
self.experimentId = variant.experimentId
self.experimentName = variant.experimentName
self.experimentType = variant.experimentType
self.startTime = variant.startTime
self.variantName = variant.variantName
self.variantId = variant.variantId
self.cycle = variant.cycle
self.phase = variant.phase
if (python_Boot.hasField(variant,"alterations2") and ((variant.alterations2 is not None))):
_g = 0
_g1 = variant.alterations2
while (_g < len(_g1)):
alteration = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
_g = (_g + 1)
_this = self.alterations2
x = apptimize_models_ABTAlteration.alterationFromJSON(alteration,self)
_this.append(x)
self.alterations = (self.alterations + self.alterations2)
def selectAlterationsIntoArray(self,env,target,overrideOnly):
match = self.performFilterMatchingWithEnvironment(env)
if (match.result != apptimize_filter_ABTFilterResult.ABTFilterResultTrue):
return
newOverrideOnly = self.computeNewOverrideState(overrideOnly,match)
_g = 0
_g1 = self.alterations
while (_g < len(_g1)):
alteration = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
_g = (_g + 1)
alteration.selectAlterationsIntoArray(env,target,newOverrideOnly)
    # Simple accessors over the JSON-derived fields.
    def getVariantID(self):
        return self.variantId

    def getVariantName(self):
        return self.variantName

    def getExperimentID(self):
        return self.experimentId

    def getExperimentName(self):
        return self.experimentName

    def getExperimentType(self):
        return self.experimentType

    def getCodeBlockName(self):
        return self.codeBlockName

    def getPhase(self):
        return self.phase

    def getCycle(self):
        return self.cycle

    def getUrlKeyProviders(self):
        # Wrap this variant's alterations via the inherited helper.
        return self.asUrlProviders(self.alterations)
    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields when an instance is
        # created without running __init__ (e.g. during unserialization).
        _hx_o.alterations = None
        _hx_o.alterations2 = None
        _hx_o.codeBlockName = None
        _hx_o.experimentId = None
        _hx_o.experimentName = None
        _hx_o.experimentType = None
        _hx_o.startTime = None
        _hx_o.variantId = None
        _hx_o.variantName = None
        _hx_o.cycle = None
        _hx_o.phase = None
# Haxe runtime class registration.
apptimize_models_ABTVariant._hx_class = apptimize_models_ABTVariant
_hx_classes["apptimize.models.ABTVariant"] = apptimize_models_ABTVariant
class apptimize_models_ABTHotfixVariant(apptimize_models_ABTVariant):
    """Variant subclass for hotfixes; adds a `hotfixName` parsed from JSON."""
    _hx_class_name = "apptimize.models.ABTHotfixVariant"
    _hx_is_interface = "False"
    __slots__ = ("hotfixName",)
    _hx_fields = ["hotfixName"]
    _hx_methods = ["fromJSON"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_ABTVariant

    def __init__(self,variant):
        self.hotfixName = None
        super().__init__(variant)

    def fromJSON(self,obj):
        # Base parsing first (the base checks Type.getClass and skips the
        # experiment fields for this class), then the hotfix-specific name.
        super().fromJSON(obj)
        variant = obj
        self.hotfixName = variant.hotfixName

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o.hotfixName = None
# Haxe runtime class registration.
apptimize_models_ABTHotfixVariant._hx_class = apptimize_models_ABTHotfixVariant
_hx_classes["apptimize.models.ABTHotfixVariant"] = apptimize_models_ABTHotfixVariant
class apptimize_models_results_ABTResultEntry(apptimize_models_ABTJSONObject):
    """Base class for a single entry in the Apptimize result log.

    Every entry receives a persisted, monotonically increasing sequence id
    plus two timestamps (raw device clock and a never-decreasing corrected
    clock), all assigned under a shared lock.
    """
    _hx_class_name = "apptimize.models.results.ABTResultEntry"
    _hx_is_interface = "False"
    __slots__ = ("_id", "_monotonicTimestamp", "_deviceTimestamp", "_userAttributes", "_prefixedAttributes")
    _hx_fields = ["_id", "_monotonicTimestamp", "_deviceTimestamp", "_userAttributes", "_prefixedAttributes"]
    _hx_methods = ["_getNextSequenceNumber", "_getMonotonicTimestamp", "JSONRepresentation", "hxSerialize", "hxUnserialize"]
    _hx_statics = ["RESULT_ENTRY_CREATION_LOCK"]
    _hx_interfaces = []
    _hx_super = apptimize_models_ABTJSONObject

    def __init__(self,env):
        self._prefixedAttributes = None
        self._userAttributes = None
        self._deviceTimestamp = None
        self._monotonicTimestamp = None
        self._id = None
        # Id + timestamps are assigned under a global lock so concurrent
        # entry creation cannot produce duplicate sequence numbers.
        apptimize_models_results_ABTResultEntry.RESULT_ENTRY_CREATION_LOCK.acquire()
        try:
            self._id = self._getNextSequenceNumber()
            # Device time in milliseconds, as an emulated Int64.
            self._deviceTimestamp = haxe_Int64Helper.fromFloat((Date.now().date.timestamp() * 1000))
            self._monotonicTimestamp = self._getMonotonicTimestamp(self._deviceTimestamp)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            # Release the lock on the error path before re-raising.
            apptimize_models_results_ABTResultEntry.RESULT_ENTRY_CREATION_LOCK.release()
            raise haxe_Exception.thrown(e)
        # Success path: release the lock taken above.
        apptimize_models_results_ABTResultEntry.RESULT_ENTRY_CREATION_LOCK.release()
        self._prefixedAttributes = haxe_ds_StringMap()
        if ((env is not None) and ((env.customProperties.availableProperties is not None))):
            # Snapshot custom, internal and application properties into the
            # prefixed-attribute map for this entry.
            env.customProperties.addJSONProperties(self._prefixedAttributes)
            apptimize_support_properties_ABTInternalProperties.sharedInstance().addJSONProperties(self._prefixedAttributes)
            env.applicationProperties.addJSONProperties(self._prefixedAttributes)

    def _getNextSequenceNumber(self):
        """Load, increment, persist and return the 64-bit sequence counter.

        Int64 values are emulated as (high, low) 32-bit halves; the
        `(x + 2**31) % 2**32 - 2**31` expressions reproduce signed 32-bit
        wraparound.  The `ret` locals are unused generator artifacts.
        """
        sequenceString = apptimize_support_persistence_ABTPersistence.loadString(apptimize_support_persistence_ABTPersistence.kResultEntrySequenceKey)
        this1 = haxe__Int64____Int64(0,0)
        sequence = this1
        if (sequenceString is not None):
            sequence = haxe_Int64Helper.parseString(sequenceString)
        ret = sequence
        # Work on a copy so the parsed value is not mutated in place.
        this1 = haxe__Int64____Int64(sequence.high,sequence.low)
        sequence = this1
        def _hx_local_2():
            # Post-increment of the low word (returns the old value).
            _hx_local_0 = sequence
            _hx_local_1 = _hx_local_0.low
            _hx_local_0.low = (_hx_local_1 + 1)
            return _hx_local_1
        ret = _hx_local_2()
        sequence.low = ((sequence.low + (2 ** 31)) % (2 ** 32) - (2 ** 31))
        if (sequence.low == 0):
            # Low word overflowed to zero: carry into the high word.
            def _hx_local_5():
                _hx_local_3 = sequence
                _hx_local_4 = _hx_local_3.high
                _hx_local_3.high = (_hx_local_4 + 1)
                return _hx_local_4
            ret = _hx_local_5()
            sequence.high = ((sequence.high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
        apptimize_support_persistence_ABTPersistence.saveString(apptimize_support_persistence_ABTPersistence.kResultEntrySequenceKey,haxe__Int64_Int64_Impl_.toString(sequence))
        return sequence

    def _getMonotonicTimestamp(self,deviceTime):
        """Return a timestamp that never decreases across entries.

        When the last persisted timestamp is >= `deviceTime` (clock moved
        backwards, or several entries within one millisecond), the result is
        last + 1; the chosen value is persisted for the next call.
        """
        timestamp = deviceTime
        this1 = haxe__Int64____Int64(0,0)
        lastTimestamp = this1
        lastTimestampString = apptimize_support_persistence_ABTPersistence.loadString(apptimize_support_persistence_ABTPersistence.kResultEntryTimestampKey)
        if (lastTimestampString is not None):
            lastTimestamp = haxe_Int64Helper.parseString(lastTimestampString)
        # Emulated signed Int64 comparison of lastTimestamp vs deviceTime.
        v = (((lastTimestamp.high - deviceTime.high) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
        if (v == 0):
            v = haxe__Int32_Int32_Impl_.ucompare(lastTimestamp.low,deviceTime.low)
        if ((((v if ((deviceTime.high < 0)) else -1) if ((lastTimestamp.high < 0)) else (v if ((deviceTime.high >= 0)) else 1))) >= 0):
            # lastTimestamp >= deviceTime: emulated Int64 add of 1 with carry.
            b_high = 0
            b_low = 1
            high = (((lastTimestamp.high + b_high) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            low = (((lastTimestamp.low + b_low) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            if (haxe__Int32_Int32_Impl_.ucompare(low,lastTimestamp.low) < 0):
                ret = high
                high = (high + 1)
                high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            this1 = haxe__Int64____Int64(high,low)
            timestamp = this1
        apptimize_support_persistence_ABTPersistence.saveString(apptimize_support_persistence_ABTPersistence.kResultEntryTimestampKey,haxe__Int64_Int64_Impl_.toString(timestamp))
        return timestamp

    def JSONRepresentation(self):
        """Return the JSON map shared by all entry types: ei (entry id),
        mt (monotonic ts), dt (device ts), pa (prefixed attributes) and,
        when present, ua (user attributes)."""
        _g = haxe_ds_StringMap()
        value = apptimize_util_ABTInt64Utils.toPreprocessedString(self._id)
        _g.h["ei"] = value
        value = apptimize_util_ABTInt64Utils.toPreprocessedString(self._monotonicTimestamp)
        _g.h["mt"] = value
        value = apptimize_util_ABTInt64Utils.toPreprocessedString(self._deviceTimestamp)
        _g.h["dt"] = value
        value = apptimize_util_ABTUtilDictionary.filterNullValues(self._prefixedAttributes)
        _g.h["pa"] = value
        jsonDict = _g
        if (self._userAttributes is not None):
            v = apptimize_util_ABTUtilDictionary.filterNullValues(self._userAttributes)
            jsonDict.h["ua"] = v
        return jsonDict

    def hxSerialize(self,s):
        # Serialization order must match hxUnserialize below.
        apptimize_util_ABTInt64Utils._serializeInt64(self._id,s)
        apptimize_util_ABTInt64Utils._serializeInt64(self._monotonicTimestamp,s)
        apptimize_util_ABTInt64Utils._serializeInt64(self._deviceTimestamp,s)
        s.serialize(self._userAttributes)
        s.serialize(self._prefixedAttributes)

    def hxUnserialize(self,u):
        self._id = apptimize_util_ABTInt64Utils._deserializeInt64(u)
        self._monotonicTimestamp = apptimize_util_ABTInt64Utils._deserializeInt64(u)
        self._deviceTimestamp = apptimize_util_ABTInt64Utils._deserializeInt64(u)
        self._userAttributes = u.unserialize()
        self._prefixedAttributes = u.unserialize()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o._id = None
        _hx_o._monotonicTimestamp = None
        _hx_o._deviceTimestamp = None
        _hx_o._userAttributes = None
        _hx_o._prefixedAttributes = None
# Haxe runtime class registration.
apptimize_models_results_ABTResultEntry._hx_class = apptimize_models_results_ABTResultEntry
_hx_classes["apptimize.models.results.ABTResultEntry"] = apptimize_models_results_ABTResultEntry
class apptimize_models_results_ABTResultEntryVariantShown(apptimize_models_results_ABTResultEntry):
    """Result entry ("v") recording that a variant was shown, carrying the
    variant id plus its cycle and phase."""
    _hx_class_name = "apptimize.models.results.ABTResultEntryVariantShown"
    _hx_is_interface = "False"
    __slots__ = ("_type", "_variantID", "_cycle", "_phase")
    _hx_fields = ["_type", "_variantID", "_cycle", "_phase"]
    _hx_methods = ["JSONRepresentation", "hxSerialize", "hxUnserialize"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_results_ABTResultEntry

    def __init__(self,env,variantID,cycle,phase):
        # Field defaults are set before the base constructor runs.
        self._phase = None
        self._cycle = None
        self._variantID = None
        self._type = "v"
        super().__init__(env)
        self._variantID = variantID
        self._cycle = cycle
        self._phase = phase

    def JSONRepresentation(self):
        # Adds "ty", a flat "v" (variant id) and a nested "vp" map holding
        # variant id, cycle and phase.
        jsonDict = super().JSONRepresentation()
        v = self._type
        jsonDict.h["ty"] = v
        v = self._variantID
        jsonDict.h["v"] = v
        _g = haxe_ds_StringMap()
        _g.h["v"] = self._variantID
        _g.h["c"] = self._cycle
        _g.h["p"] = self._phase
        v = _g
        jsonDict.h["vp"] = v
        return jsonDict

    def hxSerialize(self,s):
        # Order must match hxUnserialize below.
        super().hxSerialize(s)
        s.serialize(self._type)
        s.serialize(self._variantID)
        s.serialize(self._cycle)
        s.serialize(self._phase)

    def hxUnserialize(self,u):
        super().hxUnserialize(u)
        self._type = u.unserialize()
        self._variantID = u.unserialize()
        self._cycle = u.unserialize()
        self._phase = u.unserialize()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o._type = None
        _hx_o._variantID = None
        _hx_o._cycle = None
        _hx_o._phase = None
# Haxe runtime class registration.
apptimize_models_results_ABTResultEntryVariantShown._hx_class = apptimize_models_results_ABTResultEntryVariantShown
_hx_classes["apptimize.models.results.ABTResultEntryVariantShown"] = apptimize_models_results_ABTResultEntryVariantShown
class apptimize_models_results_ABTResultEntryEvent(apptimize_models_results_ABTResultEntry):
    """Result entry ("ee") recording a tracked event: name, source and an
    optional attribute payload."""
    _hx_class_name = "apptimize.models.results.ABTResultEntryEvent"
    _hx_is_interface = "False"
    __slots__ = ("_type", "_name", "_source", "_attributes")
    _hx_fields = ["_type", "_name", "_source", "_attributes"]
    _hx_methods = ["JSONRepresentation", "hxSerialize", "hxUnserialize"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_results_ABTResultEntry

    def __init__(self,env,name,source,attributes):
        # Field defaults are set before the base constructor runs.
        self._attributes = None
        self._source = None
        self._name = None
        self._type = "ee"
        super().__init__(env)
        self._name = name
        self._source = source
        self._attributes = attributes

    def JSONRepresentation(self):
        # Adds ty/n/s/a on top of the base entry payload.
        jsonDict = super().JSONRepresentation()
        v = self._type
        jsonDict.h["ty"] = v
        v = self._name
        jsonDict.h["n"] = v
        v = self._source
        jsonDict.h["s"] = v
        v = self._attributes
        jsonDict.h["a"] = v
        return jsonDict

    def hxSerialize(self,s):
        # Order must match hxUnserialize below.
        super().hxSerialize(s)
        s.serialize(self._type)
        s.serialize(self._name)
        s.serialize(self._source)
        s.serialize(self._attributes)

    def hxUnserialize(self,u):
        super().hxUnserialize(u)
        self._type = u.unserialize()
        self._name = u.unserialize()
        self._source = u.unserialize()
        self._attributes = u.unserialize()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o._type = None
        _hx_o._name = None
        _hx_o._source = None
        _hx_o._attributes = None
# Haxe runtime class registration.
apptimize_models_results_ABTResultEntryEvent._hx_class = apptimize_models_results_ABTResultEntryEvent
_hx_classes["apptimize.models.results.ABTResultEntryEvent"] = apptimize_models_results_ABTResultEntryEvent
class apptimize_models_results_ABTResultEntryMetadataProcessed(apptimize_models_results_ABTResultEntry):
    """Result entry ("md") recording that a metadata payload (identified by
    its sequence number) was processed, plus the enrollment state."""
    _hx_class_name = "apptimize.models.results.ABTResultEntryMetadataProcessed"
    _hx_is_interface = "False"
    __slots__ = ("_type", "_metadataSequenceNumber", "_enrolledVariantsCyclesPhases", "_enrolledVariantIDs")
    _hx_fields = ["_type", "_metadataSequenceNumber", "_enrolledVariantsCyclesPhases", "_enrolledVariantIDs"]
    _hx_methods = ["JSONRepresentation", "hxSerialize", "hxUnserialize"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_results_ABTResultEntry

    def __init__(self,env,sequence,enrolledVariantsCyclesPhases):
        # Field defaults are set before the base constructor runs.
        self._enrolledVariantIDs = None
        self._enrolledVariantsCyclesPhases = None
        self._metadataSequenceNumber = None
        self._type = "md"
        super().__init__(env)
        self._metadataSequenceNumber = sequence
        self._enrolledVariantsCyclesPhases = enrolledVariantsCyclesPhases
        self._enrolledVariantIDs = list()
        # Project the bare variant ids (field `v`) out of the enrollment
        # records (Haxe-generated index-based iteration).
        _g = 0
        _g1 = self._enrolledVariantsCyclesPhases
        while (_g < len(_g1)):
            variantCyclePhase = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            _this = self._enrolledVariantIDs
            x = variantCyclePhase.v
            _this.append(x)

    def JSONRepresentation(self):
        jsonDict = super().JSONRepresentation()
        # "vp" is only emitted when there is at least one enrollment record.
        if (len(self._enrolledVariantsCyclesPhases) > 0):
            v = self._enrolledVariantsCyclesPhases
            jsonDict.h["vp"] = v
        v = self._enrolledVariantIDs
        jsonDict.h["v"] = v
        v = self._metadataSequenceNumber
        jsonDict.h["s"] = v
        v = self._type
        jsonDict.h["ty"] = v
        return jsonDict

    def hxSerialize(self,s):
        # Order must match hxUnserialize below.
        super().hxSerialize(s)
        s.serialize(self._type)
        s.serialize(self._metadataSequenceNumber)
        s.serialize(self._enrolledVariantIDs)
        s.serialize(self._enrolledVariantsCyclesPhases)

    def hxUnserialize(self,u):
        super().hxUnserialize(u)
        self._type = u.unserialize()
        self._metadataSequenceNumber = u.unserialize()
        self._enrolledVariantIDs = u.unserialize()
        self._enrolledVariantsCyclesPhases = u.unserialize()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o._type = None
        _hx_o._metadataSequenceNumber = None
        _hx_o._enrolledVariantsCyclesPhases = None
        _hx_o._enrolledVariantIDs = None
# Haxe runtime class registration.
apptimize_models_results_ABTResultEntryMetadataProcessed._hx_class = apptimize_models_results_ABTResultEntryMetadataProcessed
_hx_classes["apptimize.models.results.ABTResultEntryMetadataProcessed"] = apptimize_models_results_ABTResultEntryMetadataProcessed
class apptimize_models_results_ABTResultEntryAttributesChanged(apptimize_models_results_ABTResultEntry):
    """Result entry ("ac") recording that user attributes changed, plus the
    enrollment state at that moment."""
    _hx_class_name = "apptimize.models.results.ABTResultEntryAttributesChanged"
    _hx_is_interface = "False"
    __slots__ = ("_type", "_enrolledVariantsCyclesPhases", "_enrolledVariantIDs")
    _hx_fields = ["_type", "_enrolledVariantsCyclesPhases", "_enrolledVariantIDs"]
    _hx_methods = ["JSONRepresentation", "hxSerialize", "hxUnserialize"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_results_ABTResultEntry

    def __init__(self,env,enrolledVariantsCyclesPhases):
        # Field defaults are set before the base constructor runs.
        self._enrolledVariantIDs = None
        self._enrolledVariantsCyclesPhases = None
        self._type = "ac"
        super().__init__(env)
        self._enrolledVariantsCyclesPhases = enrolledVariantsCyclesPhases
        self._enrolledVariantIDs = list()
        # Project the bare variant ids (field `v`) out of the enrollment
        # records (Haxe-generated index-based iteration).
        _g = 0
        _g1 = self._enrolledVariantsCyclesPhases
        while (_g < len(_g1)):
            variantCyclePhase = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            _this = self._enrolledVariantIDs
            x = variantCyclePhase.v
            _this.append(x)

    def JSONRepresentation(self):
        jsonDict = super().JSONRepresentation()
        v = self._type
        jsonDict.h["ty"] = v
        v = self._enrolledVariantIDs
        jsonDict.h["v"] = v
        # "vp" is only emitted when there is at least one enrollment record.
        if (len(self._enrolledVariantsCyclesPhases) > 0):
            v = self._enrolledVariantsCyclesPhases
            jsonDict.h["vp"] = v
        return jsonDict

    def hxSerialize(self,s):
        # Order must match hxUnserialize below.
        super().hxSerialize(s)
        s.serialize(self._type)
        s.serialize(self._enrolledVariantIDs)
        s.serialize(self._enrolledVariantsCyclesPhases)

    def hxUnserialize(self,u):
        super().hxUnserialize(u)
        self._type = u.unserialize()
        self._enrolledVariantIDs = u.unserialize()
        self._enrolledVariantsCyclesPhases = u.unserialize()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o._type = None
        _hx_o._enrolledVariantsCyclesPhases = None
        _hx_o._enrolledVariantIDs = None
# Haxe runtime class registration.
apptimize_models_results_ABTResultEntryAttributesChanged._hx_class = apptimize_models_results_ABTResultEntryAttributesChanged
_hx_classes["apptimize.models.results.ABTResultEntryAttributesChanged"] = apptimize_models_results_ABTResultEntryAttributesChanged
class apptimize_models_results_ABTResultEntryUserEnd(apptimize_models_results_ABTResultEntry):
    """Result entry ("ue") marking the end of a user's session; records the
    id of the user that takes over next."""
    _hx_class_name = "apptimize.models.results.ABTResultEntryUserEnd"
    _hx_is_interface = "False"
    __slots__ = ("_type", "_nextUserID")
    _hx_fields = ["_type", "_nextUserID"]
    _hx_methods = ["JSONRepresentation", "hxSerialize", "hxUnserialize"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_results_ABTResultEntry

    def __init__(self,env,nextUserID):
        # Defaults first, then the base constructor, then the real value —
        # mirrors the Haxe initialization order.
        self._nextUserID = None
        self._type = "ue"
        super().__init__(env)
        self._nextUserID = nextUserID

    def JSONRepresentation(self):
        # Base payload plus this entry's type tag and successor id.
        jsonDict = super().JSONRepresentation()
        jsonDict.h["ty"] = self._type
        jsonDict.h["n"] = self._nextUserID
        return jsonDict

    def hxSerialize(self,s):
        # Field order here must match hxUnserialize below.
        super().hxSerialize(s)
        for item in (self._type, self._nextUserID):
            s.serialize(item)

    def hxUnserialize(self,u):
        super().hxUnserialize(u)
        self._type = u.unserialize()
        self._nextUserID = u.unserialize()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o._type = None
        _hx_o._nextUserID = None
# Haxe runtime class registration.
apptimize_models_results_ABTResultEntryUserEnd._hx_class = apptimize_models_results_ABTResultEntryUserEnd
_hx_classes["apptimize.models.results.ABTResultEntryUserEnd"] = apptimize_models_results_ABTResultEntryUserEnd
class apptimize_models_results_ABTResultEntryUserStart(apptimize_models_results_ABTResultEntry):
    """Result entry ("us") marking the start of a user's session; records
    the id of the previous user, if any."""
    _hx_class_name = "apptimize.models.results.ABTResultEntryUserStart"
    _hx_is_interface = "False"
    __slots__ = ("_type", "_previousUserID")
    _hx_fields = ["_type", "_previousUserID"]
    _hx_methods = ["JSONRepresentation", "hxSerialize", "hxUnserialize"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_results_ABTResultEntry

    def __init__(self,env,previousUserID):
        # Defaults first, then the base constructor, then the real value —
        # mirrors the Haxe initialization order.
        self._previousUserID = None
        self._type = "us"
        super().__init__(env)
        self._previousUserID = previousUserID

    def JSONRepresentation(self):
        # Base payload plus this entry's type tag and predecessor id.
        jsonDict = super().JSONRepresentation()
        jsonDict.h["ty"] = self._type
        jsonDict.h["p"] = self._previousUserID
        return jsonDict

    def hxSerialize(self,s):
        # Field order here must match hxUnserialize below.
        super().hxSerialize(s)
        for item in (self._type, self._previousUserID):
            s.serialize(item)

    def hxUnserialize(self,u):
        super().hxUnserialize(u)
        self._type = u.unserialize()
        self._previousUserID = u.unserialize()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o._type = None
        _hx_o._previousUserID = None
# Haxe runtime class registration.
apptimize_models_results_ABTResultEntryUserStart._hx_class = apptimize_models_results_ABTResultEntryUserStart
_hx_classes["apptimize.models.results.ABTResultEntryUserStart"] = apptimize_models_results_ABTResultEntryUserStart
class apptimize_models_results_ABTResultEntrySuccessfullyPosted(apptimize_models_results_ABTResultEntry):
    """Result entry ("sp") recording that entries [first, last] were posted,
    with the server-reported timestamp.  Constructed with env=None, so no
    attribute snapshot is captured by the base class."""
    _hx_class_name = "apptimize.models.results.ABTResultEntrySuccessfullyPosted"
    _hx_is_interface = "False"
    __slots__ = ("_type", "_timestampFromServer", "_firstEntryID", "_lastEntryID")
    _hx_fields = ["_type", "_timestampFromServer", "_firstEntryID", "_lastEntryID"]
    _hx_methods = ["JSONRepresentation", "hxSerialize", "hxUnserialize"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_results_ABTResultEntry

    def __init__(self,timestampFromServer,firstEntryID,lastEntryID):
        # Int64 fields default to an emulated zero before assignment.
        this1 = haxe__Int64____Int64(0,0)
        self._lastEntryID = this1
        this1 = haxe__Int64____Int64(0,0)
        self._firstEntryID = this1
        this1 = haxe__Int64____Int64(0,0)
        self._timestampFromServer = this1
        self._type = "sp"
        super().__init__(None)
        self._timestampFromServer = timestampFromServer
        self._firstEntryID = firstEntryID
        self._lastEntryID = lastEntryID

    def JSONRepresentation(self):
        # Adds ty/t/f/l (Int64s rendered as preprocessed strings).
        jsonDict = super().JSONRepresentation()
        v = self._type
        jsonDict.h["ty"] = v
        v = apptimize_util_ABTInt64Utils.toPreprocessedString(self._timestampFromServer)
        jsonDict.h["t"] = v
        v = apptimize_util_ABTInt64Utils.toPreprocessedString(self._firstEntryID)
        jsonDict.h["f"] = v
        v = apptimize_util_ABTInt64Utils.toPreprocessedString(self._lastEntryID)
        jsonDict.h["l"] = v
        return jsonDict

    def hxSerialize(self,s):
        # Order must match hxUnserialize below.
        super().hxSerialize(s)
        s.serialize(self._type)
        apptimize_util_ABTInt64Utils._serializeInt64(self._timestampFromServer,s)
        apptimize_util_ABTInt64Utils._serializeInt64(self._firstEntryID,s)
        apptimize_util_ABTInt64Utils._serializeInt64(self._lastEntryID,s)

    def hxUnserialize(self,u):
        super().hxUnserialize(u)
        self._type = u.unserialize()
        self._timestampFromServer = apptimize_util_ABTInt64Utils._deserializeInt64(u)
        self._firstEntryID = apptimize_util_ABTInt64Utils._deserializeInt64(u)
        self._lastEntryID = apptimize_util_ABTInt64Utils._deserializeInt64(u)

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o._type = None
        _hx_o._timestampFromServer = None
        _hx_o._firstEntryID = None
        _hx_o._lastEntryID = None
# Haxe runtime class registration.
apptimize_models_results_ABTResultEntrySuccessfullyPosted._hx_class = apptimize_models_results_ABTResultEntrySuccessfullyPosted
_hx_classes["apptimize.models.results.ABTResultEntrySuccessfullyPosted"] = apptimize_models_results_ABTResultEntrySuccessfullyPosted
class apptimize_models_results_ABTResultEntryDataTypeLimitReached(apptimize_models_results_ABTResultEntry):
    """Result entry ("dl") recording that the stored-entry count limit was
    reached.  Constructed with env=None, so no attribute snapshot is
    captured by the base class."""
    _hx_class_name = "apptimize.models.results.ABTResultEntryDataTypeLimitReached"
    _hx_is_interface = "False"
    __slots__ = ("_type", "_currentEntryCount")
    _hx_fields = ["_type", "_currentEntryCount"]
    _hx_methods = ["JSONRepresentation", "hxSerialize", "hxUnserialize"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_models_results_ABTResultEntry

    def __init__(self,currentEntryCount):
        # Int64 field defaults to an emulated zero before assignment.
        this1 = haxe__Int64____Int64(0,0)
        self._currentEntryCount = this1
        self._type = "dl"
        super().__init__(None)
        self._currentEntryCount = currentEntryCount

    def JSONRepresentation(self):
        # Adds ty/c (count rendered as a preprocessed Int64 string).
        jsonDict = super().JSONRepresentation()
        v = self._type
        jsonDict.h["ty"] = v
        v = apptimize_util_ABTInt64Utils.toPreprocessedString(self._currentEntryCount)
        jsonDict.h["c"] = v
        return jsonDict

    def hxSerialize(self,s):
        # Order must match hxUnserialize below.
        super().hxSerialize(s)
        s.serialize(self._type)
        apptimize_util_ABTInt64Utils._serializeInt64(self._currentEntryCount,s)

    def hxUnserialize(self,u):
        super().hxUnserialize(u)
        self._type = u.unserialize()
        self._currentEntryCount = apptimize_util_ABTInt64Utils._deserializeInt64(u)

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o._type = None
        _hx_o._currentEntryCount = None
# Haxe runtime class registration.
apptimize_models_results_ABTResultEntryDataTypeLimitReached._hx_class = apptimize_models_results_ABTResultEntryDataTypeLimitReached
_hx_classes["apptimize.models.results.ABTResultEntryDataTypeLimitReached"] = apptimize_models_results_ABTResultEntryDataTypeLimitReached
class apptimize_models_results_ABTResultLog:
    """Accumulates result entries for one (app key, user) pair and renders
    them as the "v4" results JSON payload."""
    _hx_class_name = "apptimize.models.results.ABTResultLog"
    _hx_is_interface = "False"
    __slots__ = ("entries", "userID", "anonID", "appkey", "uniqueID")
    _hx_fields = ["entries", "userID", "anonID", "appkey", "uniqueID"]
    _hx_methods = ["logEntry", "entryCount", "getAppKey", "getUniqueUserKey", "toJSON"]

    def __init__(self,env):
        # Identity fields stay None when no environment is supplied.
        self.uniqueID = None
        self.appkey = None
        self.anonID = None
        self.userID = None
        if (env is not None):
            self.userID = env.userID
            self.anonID = env.anonID
            self.appkey = env.appkey
            self.uniqueID = env.getUniqueUserID()
        self.entries = list()

    def logEntry(self,entry):
        """Append a result entry to the pending log."""
        self.entries.append(entry)

    def entryCount(self):
        """Return the number of pending entries."""
        return len(self.entries)

    def getAppKey(self):
        return self.appkey

    def getUniqueUserKey(self):
        return self.uniqueID

    def toJSON(self):
        """Serialize the log to the v4 results JSON string.

        Keys: type (payload version), a (app key), g (server GUID),
        c (creation time, ms), e (entries), v (SDK version string),
        u (user id, only when set).
        """
        json = haxe_ds_StringMap()
        # Idiomatic comprehension replaces the generated index-based loop.
        jsonEntries = [entry.JSONRepresentation() for entry in self.entries]
        json.h["type"] = "v4"
        json.h["a"] = self.getAppKey()
        json.h["g"] = apptimize_ABTDataStore.getServerGUID()
        # NOTE(review): an unused `currentDate = Date.now()` local was
        # removed here; the payload timestamp is computed independently.
        nowMs = haxe_Int64Helper.fromFloat((Date.now().date.timestamp() * 1000))
        json.h["c"] = apptimize_util_ABTInt64Utils.toPreprocessedString(nowMs)
        json.h["e"] = jsonEntries
        json.h["v"] = ("Cross Platform " + HxOverrides.stringOrNull(apptimize_Apptimize.getApptimizeSDKVersion()))
        if (self.userID is not None):
            json.h["u"] = self.userID
        return apptimize_util_ABTJSONUtils.stringify(json)

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o.entries = None
        _hx_o.userID = None
        _hx_o.anonID = None
        _hx_o.appkey = None
        _hx_o.uniqueID = None
# Haxe runtime class registration for apptimize.models.results.ABTResultLog.
apptimize_models_results_ABTResultLog._hx_class = apptimize_models_results_ABTResultLog
_hx_classes["apptimize.models.results.ABTResultLog"] = apptimize_models_results_ABTResultLog
class apptimize_support_initialize_ABTPlatformInitialize:
    """Python-platform startup/shutdown glue: persists the SDK version,
    starts metadata polling and (optionally) hooks process exit."""
    _hx_class_name = "apptimize.support.initialize.ABTPlatformInitialize"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["_isThreadingEnabled", "initialize", "hookShutdown", "shutdownPlatform", "shutdownHook"]
    # Cached THREADING_ENABLED_KEY config value; populated by initialize().
    _isThreadingEnabled = None

    @staticmethod
    def initialize():
        # Record the running SDK version, read the threading config, then
        # start polling and (conditionally) register the shutdown hook.
        apptimize_support_persistence_ABTPersistence.saveString(apptimize_support_persistence_ABTPersistence.kApptimizeVersionKey,apptimize_Apptimize.getApptimizeSDKVersion())
        apptimize_support_initialize_ABTPlatformInitialize._isThreadingEnabled = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.THREADING_ENABLED_KEY)
        if (not apptimize_support_initialize_ABTPlatformInitialize._isThreadingEnabled):
            apptimize_ABTLogger.w("Metadata update timers are disabled when threading is disabled.",_hx_AnonObject({'fileName': "src/apptimize/support/initialize/ABTPlatformInitialize.hx", 'lineNumber': 104, 'className': "apptimize.support.initialize.ABTPlatformInitialize", 'methodName': "initialize"}))
        apptimize_api_ABTMetadataPoller.startPolling()
        apptimize_support_initialize_ABTPlatformInitialize.hookShutdown(apptimize_support_initialize_ABTPlatformInitialize._isThreadingEnabled)

    @staticmethod
    def hookShutdown(isThreaded):
        # Honors the AUTOMATIC_SHUTDOWN_HOOK config; the atexit hook is only
        # registered when threading is enabled.
        if (not apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.AUTOMATIC_SHUTDOWN_HOOK)):
            apptimize_ABTLogger.v("Process exits will not be handled to persist Apptimize library state across server restarts.",_hx_AnonObject({'fileName': "src/apptimize/support/initialize/ABTPlatformInitialize.hx", 'lineNumber': 119, 'className': "apptimize.support.initialize.ABTPlatformInitialize", 'methodName': "hookShutdown"}))
            return
        if isThreaded:
            apptimize_native_python_AtExit.register(apptimize_support_initialize_ABTPlatformInitialize.shutdownHook)

    @staticmethod
    def shutdownPlatform():
        apptimize_api_ABTMetadataPoller.stopPolling()

    @staticmethod
    def shutdownHook():
        # Exit callback: run the full SDK shutdown.
        apptimize_ApptimizeInternal.shutdown()
# Haxe runtime class registration.
apptimize_support_initialize_ABTPlatformInitialize._hx_class = apptimize_support_initialize_ABTPlatformInitialize
_hx_classes["apptimize.support.initialize.ABTPlatformInitialize"] = apptimize_support_initialize_ABTPlatformInitialize
class apptimize_support_persistence_ABTPersistentInterface:
    """Base persistence interface.

    Concrete backends override save/load/clear; saveObject/loadObject add a
    Haxe-serialization layer on top and are shared by subclasses.
    """
    _hx_class_name = "apptimize.support.persistence.ABTPersistentInterface"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_methods = ["save", "load", "clear", "saveObject", "loadObject", "sync", "hasDidUnserialize"]

    def save(self,key,value,latency = None,compress = None):
        """Store a raw string value; must be overridden by backends."""
        if (latency is None):
            latency = 0
        if (compress is None):
            compress = False
        raise haxe_Exception.thrown("ABTPersistentInterface.save not implemented")

    def load(self,key,latency = None,callback = None):
        """Load a raw string value; must be overridden by backends."""
        if (latency is None):
            latency = 0
        raise haxe_Exception.thrown("ABTPersistentInterface.load not implemented")

    def clear(self,latency = None):
        # Deliberate no-op default (unlike save/load it does not raise);
        # backends with real storage override this.
        if (latency is None):
            latency = 2

    def saveObject(self,key,value,latency = None,compress = None):
        """Serialize `value` (followed by the SDK version string) and
        persist the result via save()."""
        if (latency is None):
            latency = 0
        if (compress is None):
            compress = False
        serializer = haxe_Serializer()
        serializer.serialize(value)
        serializer.serialize(apptimize_Apptimize.getApptimizeSDKVersion())
        self.save(key,serializer.toString(),latency,compress)

    def loadObject(self,key,latency = None,callback = None):
        """Load and deserialize an object stored by saveObject().

        Invokes the object's optional `didUnserialize` hook, reports the
        result through `callback` when given, and returns it (None when the
        key is absent or deserialization fails).
        """
        if (latency is None):
            latency = 0
        _gthis = self
        result = None
        def _hx_local_0(key,serializedObject):
            nonlocal result
            if (serializedObject is not None):
                try:
                    unserializer = haxe_Unserializer(serializedObject)
                    result = unserializer.unserialize()
                    if ((result is not None) and _gthis.hasDidUnserialize(result)):
                        # Give the object a chance to rebuild transient state.
                        Reflect.field(result,"didUnserialize")()
                except BaseException as _g:
                    None
                    unknown = haxe_Exception.caught(_g).unwrap()
                    # Deserialization failures are logged, not raised.
                    apptimize_ABTLogger.e(((("Error deserializing \"" + ("null" if key is None else key)) + "\" from persistent storage. Error: ") + Std.string(unknown)),_hx_AnonObject({'fileName': "src/apptimize/support/persistence/ABTPersistence.hx", 'lineNumber': 50, 'className': "apptimize.support.persistence.ABTPersistentInterface", 'methodName': "loadObject"}))
                if (callback is not None):
                    callback(key,result)
                return result
            if (callback is not None):
                callback(key,None)
            return result
        processObject = _hx_local_0
        if (callback is not None):
            # Asynchronous path: the backend delivers data via processObject.
            return self.load(key,latency,processObject)
        else:
            # Synchronous path: load then deserialize inline.
            serializedObject = self.load(key,latency,callback)
            return processObject(key,serializedObject)

    def sync(self,key,fromLatency,toLatency,callback = None):
        # Reads from the source tier and forwards the value to the caller;
        # `toLatency` is unused by this base implementation.
        def _hx_local_0(key,value):
            if (callback is not None):
                callback(key,value)
        onCallback = _hx_local_0
        self.loadObject(key,fromLatency,onCallback)

    def hasDidUnserialize(self,obj):
        # True when the deserialized object exposes a didUnserialize() hook.
        return python_Boot.hasField(obj,"didUnserialize")

    @staticmethod
    def _hx_empty_init(_hx_o): pass
# Haxe runtime class registration.
apptimize_support_persistence_ABTPersistentInterface._hx_class = apptimize_support_persistence_ABTPersistentInterface
_hx_classes["apptimize.support.persistence.ABTPersistentInterface"] = apptimize_support_persistence_ABTPersistentInterface
class apptimize_support_persistence_ABTPICacheStorage(apptimize_support_persistence_ABTPersistentInterface):
    """In-memory persistence backend: key/value pairs live in a StringMap;
    latency and compression hints are accepted but irrelevant."""
    _hx_class_name = "apptimize.support.persistence.ABTPICacheStorage"
    _hx_is_interface = "False"
    __slots__ = ("cacheMap",)
    _hx_fields = ["cacheMap"]
    _hx_methods = ["save", "saveObject", "loadObject", "load", "clear", "sync"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_support_persistence_ABTPersistentInterface

    def __init__(self):
        self.cacheMap = haxe_ds_StringMap()

    def save(self,key,value,latency = None,compress = None):
        # Normalize the unused hints only to honor the interface contract.
        if latency is None:
            latency = 0
        if compress is None:
            compress = False
        if value is None:
            # Storing None means "delete the key".
            self.cacheMap.remove(key)
        else:
            self.cacheMap.h[key] = value

    def saveObject(self,key,value,latency = None,compress = None):
        # Objects are cached as-is; no serialization round-trip in RAM.
        if latency is None:
            latency = 0
        if compress is None:
            compress = False
        if value is None:
            self.cacheMap.remove(key)
        else:
            self.cacheMap.h[key] = value

    def loadObject(self,key,latency = None,callback = None):
        if latency is None:
            latency = 0
        cached = self.cacheMap.h.get(key,None)
        if callback is not None:
            callback(key,cached)
        return cached

    def load(self,key,latency = None,callback = None):
        if latency is None:
            latency = 0
        cached = self.cacheMap.h.get(key,None)
        if callback is not None:
            callback(key,cached)
        return cached

    def clear(self,latency = None):
        if latency is None:
            latency = 0
        # Drop everything by replacing the backing map.
        self.cacheMap = haxe_ds_StringMap()

    def sync(self,key,fromLatency,toLatency,callback = None):
        # No latency tiers in memory; just report the current value.
        if callback is not None:
            callback(key,self.load(key))

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o.cacheMap = None
# Haxe runtime class registration.
apptimize_support_persistence_ABTPICacheStorage._hx_class = apptimize_support_persistence_ABTPICacheStorage
_hx_classes["apptimize.support.persistence.ABTPICacheStorage"] = apptimize_support_persistence_ABTPICacheStorage
class apptimize_support_persistence_ABTPIDiskStorage(apptimize_support_persistence_ABTPersistentInterface):
    """Disk-backed persistence: each key maps to a file at
    `<localStoragePath><key><extension>`.  Keys written during this run are
    tracked in `_keys` so clear() can delete them."""
    _hx_class_name = "apptimize.support.persistence.ABTPIDiskStorage"
    _hx_is_interface = "False"
    __slots__ = ("_localStoragePath", "_extension", "_keys")
    _hx_fields = ["_localStoragePath", "_extension", "_keys"]
    _hx_methods = ["_dataFromDisk", "_deleteFile", "save", "load", "clear"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_support_persistence_ABTPersistentInterface

    def __init__(self):
        self._keys = None
        self._extension = ".data"
        # Hard-coded default is immediately replaced by the configured path.
        self._localStoragePath = "data/apptimize/"
        self._localStoragePath = apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.LOCAL_DISK_STORAGE_PATH_KEY)
        if (not sys_FileSystem.exists(self._localStoragePath)):
            sys_FileSystem.createDirectory(self._localStoragePath)
        self._keys = list()

    def _dataFromDisk(self,path,localPath = None):
        """Return the backing file's contents, or None (with a verbose log)
        when the file does not exist."""
        if (localPath is None):
            localPath = ""
        filePath = ((("null" if localPath is None else localPath) + ("null" if path is None else path)) + HxOverrides.stringOrNull(self._extension))
        if sys_FileSystem.exists(filePath):
            content = sys_io_File.getContent(filePath)
            return content
        else:
            apptimize_ABTLogger.v((("File not found: " + ("null" if filePath is None else filePath)) + ". Unable to load data from disk."),_hx_AnonObject({'fileName': "src/apptimize/support/persistence/ABTPIDiskStorage.hx", 'lineNumber': 30, 'className': "apptimize.support.persistence.ABTPIDiskStorage", 'methodName': "_dataFromDisk"}))
            return None

    def _deleteFile(self,key,localPath = None):
        # Delete the key's backing file if it exists.
        if (localPath is None):
            localPath = ""
        if sys_FileSystem.exists(((("null" if localPath is None else localPath) + ("null" if key is None else key)) + HxOverrides.stringOrNull(self._extension))):
            sys_FileSystem.deleteFile(((("null" if localPath is None else localPath) + ("null" if key is None else key)) + HxOverrides.stringOrNull(self._extension)))

    def save(self,key,value,latency = None,compress = None):
        """Write `value` to the key's file; a None value deletes the file
        and untracks the key."""
        if (latency is None):
            latency = 0
        if (compress is None):
            compress = False
        filePath = ((HxOverrides.stringOrNull(self._localStoragePath) + ("null" if key is None else key)) + HxOverrides.stringOrNull(self._extension))
        if (value is not None):
            sys_io_File.saveContent(filePath,value)
            _this = self._keys
            _this.append(key)
        else:
            self._deleteFile(key,self._localStoragePath)
            python_internal_ArrayImpl.remove(self._keys,key)

    def load(self,key,latency = None,callback = None):
        if (latency is None):
            latency = 0
        result = self._dataFromDisk(key,self._localStoragePath)
        if (callback is not None):
            callback(key,result)
        return result

    def clear(self,latency = None):
        # Deletes every file written through save() during this run.
        if (latency is None):
            latency = 2
        _g = 0
        _g1 = self._keys
        while (_g < len(_g1)):
            key = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            self._deleteFile(key,self._localStoragePath)
        self._keys = list()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe runtime hook: zero-initialize fields without __init__.
        _hx_o._localStoragePath = None
        _hx_o._extension = None
        _hx_o._keys = None
# Haxe runtime class registration.
apptimize_support_persistence_ABTPIDiskStorage._hx_class = apptimize_support_persistence_ABTPIDiskStorage
_hx_classes["apptimize.support.persistence.ABTPIDiskStorage"] = apptimize_support_persistence_ABTPIDiskStorage
class apptimize_support_persistence_ABTPISmartStorage(apptimize_support_persistence_ABTPersistentInterface):
    """Two-tier storage facade: routes each call to the low-latency backend
    (in-memory) or the high-latency backend (disk) based on the requested
    latency, and can sync individual keys between the two tiers."""
    _hx_class_name = "apptimize.support.persistence.ABTPISmartStorage"
    _hx_is_interface = "False"
    __slots__ = ("_lowLatencyStorage", "_highLatencyStorage")
    _hx_fields = ["_lowLatencyStorage", "_highLatencyStorage"]
    _hx_methods = ["save", "load", "saveObject", "loadObject", "storageForLatency", "clear", "sync", "deleteHighLatencyOnSync"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_support_persistence_ABTPersistentInterface

    def __init__(self,lowLatencyStorage,highLatencyStorage):
        self._lowLatencyStorage = lowLatencyStorage
        self._highLatencyStorage = highLatencyStorage

    def save(self,key,value,latency = None,compress = None):
        """Save through to the storage tier selected by *latency*."""
        if (latency is None):
            latency = 0
        if (compress is None):
            compress = False
        # Fix: `compress` was accepted but not forwarded, so compression
        # requests never reached the underlying store (the interface-level
        # save(key, value, latency, compress) signature is used elsewhere,
        # e.g. by ABTPersistence.saveString).
        self.storageForLatency(latency).save(key,value,latency,compress)

    def load(self,key,latency = None,callback = None):
        """Load from the storage tier selected by *latency*."""
        if (latency is None):
            latency = 0
        return self.storageForLatency(latency).load(key,latency,callback)

    def saveObject(self,key,value,latency = None,compress = None):
        """Object-serializing variant of save(); same tier routing."""
        if (latency is None):
            latency = 0
        if (compress is None):
            compress = False
        # Fix: forward `compress` (see save()).
        self.storageForLatency(latency).saveObject(key,value,latency,compress)

    def loadObject(self,key,latency = None,callback = None):
        """Object-deserializing variant of load(); same tier routing."""
        if (latency is None):
            latency = 0
        return self.storageForLatency(latency).loadObject(key,latency,callback)

    def storageForLatency(self,latency):
        """LOW_LATENCY selects the fast store; anything else the slow one."""
        if (latency == apptimize_support_persistence_ABTPersistence.LOW_LATENCY):
            return self._lowLatencyStorage
        return self._highLatencyStorage

    def clear(self,latency = None):
        """Clear one or both tiers depending on *latency* (default 2 = ALL)."""
        if (latency is None):
            latency = 2
        if ((latency == apptimize_support_persistence_ABTPersistence.LOW_LATENCY) or ((latency == apptimize_support_persistence_ABTPersistence.ALL_LATENCY))):
            self._lowLatencyStorage.clear(latency)
        if ((latency == apptimize_support_persistence_ABTPersistence.HIGH_LATENCY) or ((latency == apptimize_support_persistence_ABTPersistence.ALL_LATENCY))):
            self._highLatencyStorage.clear(latency)

    def sync(self,key,fromLatency,toLatency,callback = None):
        """Copy *key* from one tier to the other; when copying out of the
        high-latency tier, optionally delete the source entry afterwards."""
        _gthis = self
        def _hx_local_0(key,value):
            _gthis.saveObject(key,value,toLatency)
            if ((fromLatency == apptimize_support_persistence_ABTPersistence.HIGH_LATENCY) and _gthis.deleteHighLatencyOnSync()):
                # Saving None deletes the entry from the source tier.
                _gthis.save(key,None,fromLatency)
            if (callback is not None):
                callback(key,value)
        onCallback = _hx_local_0
        self.loadObject(key,fromLatency,onCallback)

    def deleteHighLatencyOnSync(self):
        """Whether sync() removes the high-latency copy after promoting it."""
        return True

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._lowLatencyStorage = None
        _hx_o._highLatencyStorage = None
apptimize_support_persistence_ABTPISmartStorage._hx_class = apptimize_support_persistence_ABTPISmartStorage
_hx_classes["apptimize.support.persistence.ABTPISmartStorage"] = apptimize_support_persistence_ABTPISmartStorage
class apptimize_support_persistence_ABTPersistence:
    """Static facade over the SDK's persistent storage.

    Every operation acquires SYSTEM_DATA_LOCK around the backing store
    obtained from getPersistentInterface().  The latency constants,
    key-name constants (kMetadataKey, ...) and _isFlushing listed in
    _hx_statics are Haxe statics assigned elsewhere in this module.
    """
    _hx_class_name = "apptimize.support.persistence.ABTPersistence"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["LOW_LATENCY", "HIGH_LATENCY", "ALL_LATENCY", "kMetadataKey", "kUserIDKey", "kAnonymousGuidKey", "kCustomPropertiesKey", "kInternalPropertiesKey", "kResultLogsKey", "kResultPostsKey", "kResultPostsListKey", "kResultEntrySequenceKey", "kResultEntryTimestampKey", "kApptimizeVersionKey", "kLockAccessKey", "kPostManagementKey", "kResultLastSubmitTimeKey", "kMetadataLastCheckTimeKey", "kDisabledVersions", "_persistentInterface", "_isFlushing", "getPersistentInterface", "shutdown", "loadFromHighLatency", "saveToHighLatency", "clear", "saveString", "saveObject", "flushTracking", "loadString", "loadObject"]
    _persistentInterface = None
    @staticmethod
    def getPersistentInterface():
        """Lazily create and cache the storage backend: a cache-only store
        when the configured storage type is "memory", otherwise a smart
        two-tier store (in-memory cache over disk)."""
        if (apptimize_support_persistence_ABTPersistence._persistentInterface is None):
            if apptimize_support_properties_ABTConfigProperties.sharedInstance().isPropertyAvailable(apptimize_support_properties_ABTConfigProperties.STORAGE_TYPE_KEY):
                if (apptimize_support_properties_ABTConfigProperties.sharedInstance().valueForProperty(apptimize_support_properties_ABTConfigProperties.STORAGE_TYPE_KEY) == "memory"):
                    apptimize_support_persistence_ABTPersistence._persistentInterface = apptimize_support_persistence_ABTPICacheStorage()
                    return apptimize_support_persistence_ABTPersistence._persistentInterface
            apptimize_support_persistence_ABTPersistence._persistentInterface = apptimize_support_persistence_ABTPISmartStorage(apptimize_support_persistence_ABTPICacheStorage(),apptimize_support_persistence_ABTPIDiskStorage())
        return apptimize_support_persistence_ABTPersistence._persistentInterface
    @staticmethod
    def shutdown():
        """Drop the cached backend so the next access rebuilds it."""
        apptimize_support_persistence_ABTPersistence._persistentInterface = None
    @staticmethod
    def loadFromHighLatency(callback):
        """Sync every well-known key from high- to low-latency storage and
        invoke *callback* once, after the last key's sync callback fires."""
        keys = [apptimize_support_persistence_ABTPersistence.kMetadataKey, apptimize_support_persistence_ABTPersistence.kUserIDKey, apptimize_support_persistence_ABTPersistence.kAnonymousGuidKey, apptimize_support_persistence_ABTPersistence.kCustomPropertiesKey, apptimize_support_persistence_ABTPersistence.kInternalPropertiesKey, apptimize_support_persistence_ABTPersistence.kResultLogsKey, apptimize_support_persistence_ABTPersistence.kResultPostsKey, apptimize_support_persistence_ABTPersistence.kResultEntrySequenceKey, apptimize_support_persistence_ABTPersistence.kResultEntryTimestampKey, apptimize_support_persistence_ABTPersistence.kApptimizeVersionKey, apptimize_support_persistence_ABTPersistence.kLockAccessKey, apptimize_support_persistence_ABTPersistence.kPostManagementKey, apptimize_support_persistence_ABTPersistence.kResultLastSubmitTimeKey, apptimize_support_persistence_ABTPersistence.kMetadataLastCheckTimeKey, apptimize_support_persistence_ABTPersistence.kResultPostsListKey]
        syncedKeys = list(keys)
        def _hx_local_0(key,value):
            # Remove each key as its sync completes; fire the completion
            # callback when none remain.
            python_internal_ArrayImpl.remove(syncedKeys,key)
            if (len(syncedKeys) == 0):
                callback()
        onSync = _hx_local_0
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.acquire()
        try:
            _g = 0
            while (_g < len(keys)):
                key = (keys[_g] if _g >= 0 and _g < len(keys) else None)
                _g = (_g + 1)
                apptimize_support_persistence_ABTPersistence.getPersistentInterface().sync(key,apptimize_support_persistence_ABTPersistence.HIGH_LATENCY,apptimize_support_persistence_ABTPersistence.LOW_LATENCY,onSync)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
    @staticmethod
    def saveToHighLatency():
        """Sync every well-known key from low- to high-latency storage."""
        keys = [apptimize_support_persistence_ABTPersistence.kMetadataKey, apptimize_support_persistence_ABTPersistence.kUserIDKey, apptimize_support_persistence_ABTPersistence.kAnonymousGuidKey, apptimize_support_persistence_ABTPersistence.kCustomPropertiesKey, apptimize_support_persistence_ABTPersistence.kInternalPropertiesKey, apptimize_support_persistence_ABTPersistence.kResultLogsKey, apptimize_support_persistence_ABTPersistence.kResultPostsKey, apptimize_support_persistence_ABTPersistence.kResultEntrySequenceKey, apptimize_support_persistence_ABTPersistence.kResultEntryTimestampKey, apptimize_support_persistence_ABTPersistence.kApptimizeVersionKey, apptimize_support_persistence_ABTPersistence.kLockAccessKey, apptimize_support_persistence_ABTPersistence.kPostManagementKey, apptimize_support_persistence_ABTPersistence.kResultLastSubmitTimeKey, apptimize_support_persistence_ABTPersistence.kMetadataLastCheckTimeKey, apptimize_support_persistence_ABTPersistence.kResultPostsListKey]
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.acquire()
        try:
            _g = 0
            while (_g < len(keys)):
                key = (keys[_g] if _g >= 0 and _g < len(keys) else None)
                _g = (_g + 1)
                apptimize_support_persistence_ABTPersistence.getPersistentInterface().sync(key,apptimize_support_persistence_ABTPersistence.LOW_LATENCY,apptimize_support_persistence_ABTPersistence.HIGH_LATENCY)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
    @staticmethod
    def clear():
        """Clear both storage tiers under the system data lock."""
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.acquire()
        try:
            apptimize_support_persistence_ABTPersistence.getPersistentInterface().clear(apptimize_support_persistence_ABTPersistence.ALL_LATENCY)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
    @staticmethod
    def saveString(key,value,latency = None,compress = None):
        """Persist a string; on failure, log and flush pending results
        (guarded against re-entry by _isFlushing)."""
        if (latency is None):
            latency = 0
        if (compress is None):
            compress = False
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.acquire()
        try:
            try:
                apptimize_support_persistence_ABTPersistence.getPersistentInterface().save(key,value,latency,compress)
            except BaseException as _g:
                None
                unknown = haxe_Exception.caught(_g).unwrap()
                if (not apptimize_support_persistence_ABTPersistence._isFlushing):
                    apptimize_ABTLogger.e(((("Unable to store \"" + ("null" if key is None else key)) + "\" to persistent storage. Submitting all pending results data. Error: ") + Std.string(unknown)),_hx_AnonObject({'fileName': "src/apptimize/support/persistence/ABTPersistence.hx", 'lineNumber': 215, 'className': "apptimize.support.persistence.ABTPersistence", 'methodName': "saveString"}))
                    apptimize_support_persistence_ABTPersistence.flushTracking()
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
    @staticmethod
    def saveObject(key,value,latency = None,compress = None):
        """Persist a serialized object; failure handling mirrors saveString."""
        if (latency is None):
            latency = 0
        if (compress is None):
            compress = False
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.acquire()
        try:
            try:
                apptimize_support_persistence_ABTPersistence.getPersistentInterface().saveObject(key,value,latency,compress)
            except BaseException as _g:
                None
                unknown = haxe_Exception.caught(_g).unwrap()
                if (not apptimize_support_persistence_ABTPersistence._isFlushing):
                    apptimize_ABTLogger.e(((("Unable to store \"" + ("null" if key is None else key)) + "\" to persistent storage. Submitting all pending results data. Error: ") + Std.string(unknown)),_hx_AnonObject({'fileName': "src/apptimize/support/persistence/ABTPersistence.hx", 'lineNumber': 229, 'className': "apptimize.support.persistence.ABTPersistence", 'methodName': "saveObject"}))
                    apptimize_support_persistence_ABTPersistence.flushTracking()
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
    @staticmethod
    def flushTracking():
        """Submit all pending results once; _isFlushing prevents the
        save-failure path from re-entering this method."""
        if apptimize_support_persistence_ABTPersistence._isFlushing:
            return
        apptimize_support_persistence_ABTPersistence._isFlushing = True
        try:
            apptimize_Apptimize.flushTracking()
            apptimize_support_persistence_ABTPersistence._isFlushing = False
        except BaseException as _g:
            er = haxe_Exception.caught(_g)
            apptimize_support_persistence_ABTPersistence._isFlushing = False
            apptimize_ABTLogger.e(("Error on flushing pending results data " + Std.string(er)),_hx_AnonObject({'fileName': "src/apptimize/support/persistence/ABTPersistence.hx", 'lineNumber': 247, 'className': "apptimize.support.persistence.ABTPersistence", 'methodName': "flushTracking"}))
    @staticmethod
    def loadString(key,latency = None):
        """Return the stored string for *key*, or None on failure."""
        if (latency is None):
            latency = 0
        result = None
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.acquire()
        try:
            try:
                result = apptimize_support_persistence_ABTPersistence.getPersistentInterface().load(key,latency)
            except BaseException as _g:
                None
                unknown = haxe_Exception.caught(_g).unwrap()
                apptimize_ABTLogger.e(((("Unable to retrieve \"" + ("null" if key is None else key)) + "\" from persistent storage. Error: ") + Std.string(unknown)),_hx_AnonObject({'fileName': "src/apptimize/support/persistence/ABTPersistence.hx", 'lineNumber': 261, 'className': "apptimize.support.persistence.ABTPersistence", 'methodName': "loadString"}))
                # Fix: this early return previously skipped the release at the
                # bottom of the method, leaking SYSTEM_DATA_LOCK and blocking
                # every subsequent persistence operation.
                apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
                return None
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
        return result
    @staticmethod
    def loadObject(key,latency = None):
        """Return the stored object for *key*, or None on failure."""
        if (latency is None):
            latency = 0
        obj = None
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.acquire()
        try:
            try:
                obj = apptimize_support_persistence_ABTPersistence.getPersistentInterface().loadObject(key,latency,None)
            except BaseException as _g:
                None
                unknown = haxe_Exception.caught(_g).unwrap()
                apptimize_ABTLogger.e(((("Unable to retrieve \"" + ("null" if key is None else key)) + "\" from persistent storage. Error: ") + Std.string(unknown)),_hx_AnonObject({'fileName': "src/apptimize/support/persistence/ABTPersistence.hx", 'lineNumber': 274, 'className': "apptimize.support.persistence.ABTPersistence", 'methodName': "loadObject"}))
                # Fix: release the lock before the early return (see loadString).
                apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
                return None
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
            raise haxe_Exception.thrown(e)
        apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK.release()
        return obj
apptimize_support_persistence_ABTPersistence._hx_class = apptimize_support_persistence_ABTPersistence
_hx_classes["apptimize.support.persistence.ABTPersistence"] = apptimize_support_persistence_ABTPersistence
class apptimize_support_properties_ABTProperties:
    """Base class for property stores: a Haxe StringMap of values guarded by
    a dedicated lock.  Subclasses seed defaults via setPropertyDefaults()."""
    _hx_class_name = "apptimize.support.properties.ABTProperties"
    _hx_is_interface = "False"
    __slots__ = ("availableProperties", "PROPERTYLOCK")
    _hx_fields = ["availableProperties", "PROPERTYLOCK"]
    _hx_methods = ["setPropertyDefaults", "isPropertyAvailable", "valueForProperty", "setProperty", "setProperties"]
    def __init__(self):
        self.PROPERTYLOCK = apptimize_util_ABTDataLock.getNewLock("property_lock")
        self.availableProperties = haxe_ds_StringMap()
        self.PROPERTYLOCK.acquire()
        # Generated lock idiom used throughout: on failure, release the lock
        # and re-raise the unwrapped error as a Haxe exception; on success,
        # release after the try block.
        try:
            self.availableProperties = haxe_ds_StringMap()
            self.setPropertyDefaults()
        except BaseException as _g:
            e = haxe_Exception.caught(_g).unwrap()
            self.PROPERTYLOCK.release()
            raise haxe_Exception.thrown(e)
        self.PROPERTYLOCK.release()
    def setPropertyDefaults(self):
        """Hook for subclasses to install default properties; no-op here."""
        pass
    def isPropertyAvailable(self,propertyName):
        """Return True when *propertyName* maps to a non-None value."""
        # NOTE: 'property' shadows the builtin; kept as emitted by Haxe.
        property = None
        self.PROPERTYLOCK.acquire()
        try:
            property = self.availableProperties.h.get(propertyName,None)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            self.PROPERTYLOCK.release()
            raise haxe_Exception.thrown(e)
        self.PROPERTYLOCK.release()
        return (property is not None)
    def valueForProperty(self,propertyName):
        """Return the stored value for *propertyName*, or None if absent."""
        property = None
        self.PROPERTYLOCK.acquire()
        try:
            property = self.availableProperties.h.get(propertyName,None)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            self.PROPERTYLOCK.release()
            raise haxe_Exception.thrown(e)
        self.PROPERTYLOCK.release()
        return property
    def setProperty(self,key,value):
        """Store *value* under *key* while holding the property lock."""
        self.PROPERTYLOCK.acquire()
        try:
            self.availableProperties.h[key] = value
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            self.PROPERTYLOCK.release()
            raise haxe_Exception.thrown(e)
        self.PROPERTYLOCK.release()
    def setProperties(self,stringMap):
        """Copy every entry of *stringMap* (a Haxe StringMap) into this store
        via setProperty (one lock round-trip per key)."""
        key = stringMap.keys()
        while key.hasNext():
            key1 = key.next()
            self.setProperty(key1,stringMap.h.get(key1,None))
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.availableProperties = None
        _hx_o.PROPERTYLOCK = None
apptimize_support_properties_ABTProperties._hx_class = apptimize_support_properties_ABTProperties
_hx_classes["apptimize.support.properties.ABTProperties"] = apptimize_support_properties_ABTProperties
class apptimize_support_properties_ABTApplicationProperties(apptimize_support_properties_ABTProperties):
    """Property store describing the host application and SDK (version,
    platform, OS version).  A process-wide singleton is exposed through
    sharedInstance()."""
    _hx_class_name = "apptimize.support.properties.ABTApplicationProperties"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["setPropertyDefaults", "addJSONProperties"]
    _hx_statics = ["_instance", "_sigilForApplicationNamespace", "sharedInstance", "getPlatformVersion", "formatPlatformVersion"]
    _hx_interfaces = []
    _hx_super = apptimize_support_properties_ABTProperties

    def __init__(self):
        super().__init__()

    def setPropertyDefaults(self):
        """Populate the built-in application-level properties."""
        props = self.availableProperties.h
        props["apptimize_version"] = apptimize_Apptimize.getApptimizeSDKVersion()
        props["apptimize_platform"] = apptimize_Apptimize.getApptimizeSDKPlatform()
        # app_version / app_name are not known on this platform.
        props["app_version"] = None
        props["app_name"] = None
        props["system_version"] = apptimize_support_properties_ABTApplicationProperties.getPlatformVersion()

    def addJSONProperties(self,jsonProperties):
        """Copy every property into *jsonProperties*, prefixing each key with
        the application-namespace sigil."""
        it = self.availableProperties.keys()
        while it.hasNext():
            name = it.next()
            prefixed = (HxOverrides.stringOrNull(apptimize_support_properties_ABTApplicationProperties._sigilForApplicationNamespace) + ("null" if name is None else name))
            jsonProperties.h[prefixed] = self.availableProperties.h.get(name,None)
    _instance = None

    @staticmethod
    def sharedInstance():
        """Return the singleton instance, creating it on first access."""
        if (apptimize_support_properties_ABTApplicationProperties._instance is None):
            apptimize_support_properties_ABTApplicationProperties._instance = apptimize_support_properties_ABTApplicationProperties()
        return apptimize_support_properties_ABTApplicationProperties._instance

    @staticmethod
    def getPlatformVersion():
        """Return the running Python version formatted as "major.minor.micro"."""
        return apptimize_support_properties_ABTApplicationProperties.formatPlatformVersion(python_lib_Sys.version_info)

    @staticmethod
    def formatPlatformVersion(systemVersion):
        """Format an object exposing major/minor/micro fields as a dotted string."""
        major = Std.string(Reflect.field(systemVersion,"major"))
        minor = Std.string(Reflect.field(systemVersion,"minor"))
        micro = Std.string(Reflect.field(systemVersion,"micro"))
        return ((((major + ".") + minor) + ".") + micro)

    @staticmethod
    def _hx_empty_init(_hx_o): pass
apptimize_support_properties_ABTApplicationProperties._hx_class = apptimize_support_properties_ABTApplicationProperties
_hx_classes["apptimize.support.properties.ABTApplicationProperties"] = apptimize_support_properties_ABTApplicationProperties
class apptimize_support_properties_ABTConfigProperties(apptimize_support_properties_ABTProperties):
    """SDK configuration store.  setPropertyDefaults() installs the built-in
    defaults; a process-wide singleton is exposed via sharedInstance().
    The *_KEY static names themselves are assigned elsewhere in this module."""
    _hx_class_name = "apptimize.support.properties.ABTConfigProperties"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["setPropertyDefaults"]
    _hx_statics = ["META_DATA_URL_KEY", "META_DATA_URL_LL_KEY", "META_DATA_URL_HL_KEY", "LOG_LEVEL_KEY", "FOREGROUND_PERIOD_MS_KEY", "RESULT_POST_DELAY_MS_KEY", "THREADING_ENABLED_KEY", "RESULT_POST_THREAD_POOL_SIZE_KEY", "ALTERATION_CACHE_SIZE_KEY", "RESULTS_CACHE_SIZE_KEY", "MAXIMUM_RESULT_ENTRIES_KEY", "MAXIMUM_PENDING_RESULTS_KEY", "METADATA_POLLING_INTERVAL_MS_KEY", "METADATA_POLLING_BACKGROUND_INTERVAL_MS_KEY", "EXCEPTIONS_ENABLED_KEY", "MAXIMUM_RESULT_POST_FAILURE_KEY", "MAXIMUM_RESULT_POST_SENDER_TIMEOUT_MS_KEY", "STORAGE_TYPE_KEY", "AUTOMATIC_SHUTDOWN_HOOK", "APPTIMIZE_ENVIRONMENT_KEY", "APPTIMIZE_REGION_KEY", "COMPRESS_PERSISTENCE_STORE_KEY", "GROUPS_BASE_URL_KEY", "REACT_NATIVE_STORAGE_KEY", "LOCAL_DISK_STORAGE_PATH_KEY", "_instance", "sharedInstance"]
    _hx_interfaces = []
    _hx_super = apptimize_support_properties_ABTProperties

    def __init__(self):
        super().__init__()

    def setPropertyDefaults(self):
        """Install the SDK's built-in configuration defaults.

        The (key, value) table below is applied in the same order as the
        generated original; None marks a setting with no default."""
        cfg = apptimize_support_properties_ABTConfigProperties
        defaults = [
            (cfg.META_DATA_URL_LL_KEY, "https://md-ll.apptimize.com/api/metadata/v4/"),
            (cfg.META_DATA_URL_HL_KEY, "https://md-hl.apptimize.com/api/metadata/v4/"),
            (cfg.META_DATA_URL_KEY, None),
            (cfg.LOG_LEVEL_KEY, "LOG_LEVEL_WARN"),
            (cfg.FOREGROUND_PERIOD_MS_KEY, 10000),
            (cfg.RESULT_POST_DELAY_MS_KEY, 60000),
            (cfg.ALTERATION_CACHE_SIZE_KEY, 10),
            (cfg.RESULTS_CACHE_SIZE_KEY, 10),
            (cfg.MAXIMUM_RESULT_ENTRIES_KEY, 1000),
            (cfg.MAXIMUM_PENDING_RESULTS_KEY, 1000),
            (cfg.METADATA_POLLING_INTERVAL_MS_KEY, 600000),
            (cfg.MAXIMUM_RESULT_POST_FAILURE_KEY, 3),
            (cfg.MAXIMUM_RESULT_POST_SENDER_TIMEOUT_MS_KEY, 3000),
            (cfg.METADATA_POLLING_BACKGROUND_INTERVAL_MS_KEY, 86400000),
            (cfg.STORAGE_TYPE_KEY, None),
            (cfg.APPTIMIZE_ENVIRONMENT_KEY, None),
            (cfg.AUTOMATIC_SHUTDOWN_HOOK, True),
            (cfg.GROUPS_BASE_URL_KEY, "https://mapi.apptimize.com"),
            (cfg.LOCAL_DISK_STORAGE_PATH_KEY, "data/apptimize/"),
            (cfg.REACT_NATIVE_STORAGE_KEY, None),
            (cfg.THREADING_ENABLED_KEY, True),
            (cfg.RESULT_POST_THREAD_POOL_SIZE_KEY, 20),
            (cfg.EXCEPTIONS_ENABLED_KEY, True),
            (cfg.COMPRESS_PERSISTENCE_STORE_KEY, False),
        ]
        for key, value in defaults:
            self.availableProperties.h[key] = value
    _instance = None

    @staticmethod
    def sharedInstance():
        """Return the singleton configuration store, creating it on demand."""
        if (apptimize_support_properties_ABTConfigProperties._instance is None):
            apptimize_support_properties_ABTConfigProperties._instance = apptimize_support_properties_ABTConfigProperties()
        return apptimize_support_properties_ABTConfigProperties._instance

    @staticmethod
    def _hx_empty_init(_hx_o): pass
apptimize_support_properties_ABTConfigProperties._hx_class = apptimize_support_properties_ABTConfigProperties
_hx_classes["apptimize.support.properties.ABTConfigProperties"] = apptimize_support_properties_ABTConfigProperties
class apptimize_support_properties_CustomPropertyNamespace(Enum):
    # Haxe-generated enum of property namespaces; each constructor below is
    # instantiated once with (name, index, params).  The index values are
    # what ABTCustomProperties.sigilForNamespace switches on.
    __slots__ = ()
    _hx_class_name = "apptimize.support.properties.CustomPropertyNamespace"
    _hx_constructs = ["UserAttribute", "ApptimizeLocal", "ApptimizeInternal", "Mixpanel"]
apptimize_support_properties_CustomPropertyNamespace.UserAttribute = apptimize_support_properties_CustomPropertyNamespace("UserAttribute", 0, ())
apptimize_support_properties_CustomPropertyNamespace.ApptimizeLocal = apptimize_support_properties_CustomPropertyNamespace("ApptimizeLocal", 1, ())
apptimize_support_properties_CustomPropertyNamespace.ApptimizeInternal = apptimize_support_properties_CustomPropertyNamespace("ApptimizeInternal", 2, ())
apptimize_support_properties_CustomPropertyNamespace.Mixpanel = apptimize_support_properties_CustomPropertyNamespace("Mixpanel", 3, ())
apptimize_support_properties_CustomPropertyNamespace._hx_class = apptimize_support_properties_CustomPropertyNamespace
_hx_classes["apptimize.support.properties.CustomPropertyNamespace"] = apptimize_support_properties_CustomPropertyNamespace
class apptimize_support_properties_ABTCustomProperties(apptimize_support_properties_ABTProperties):
    """Property store whose keys are prefixed with a one-character sigil
    identifying the namespace (user attribute, local, internal, Mixpanel)."""
    _hx_class_name = "apptimize.support.properties.ABTCustomProperties"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["setPropertyDefaults", "setProperty", "setPropertyForNamespace", "sigilForNamespace", "valueForNamespacedProperty", "addJSONProperties"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = apptimize_support_properties_ABTProperties

    def __init__(self):
        super().__init__()

    def setPropertyDefaults(self):
        # Custom properties have no built-in defaults.
        pass

    def setProperty(self,key,value):
        """Bare setProperty calls go to the UserAttribute namespace."""
        self.setPropertyForNamespace(key,value,apptimize_support_properties_CustomPropertyNamespace.UserAttribute)

    def setPropertyForNamespace(self,key,value,namespace):
        """Store *value* under the sigil-prefixed (namespaced) key."""
        namespacedKey = (HxOverrides.stringOrNull(self.sigilForNamespace(namespace)) + ("null" if key is None else key))
        super().setProperty(namespacedKey,value)

    def sigilForNamespace(self,namespace):
        # Single-character prefix per namespace index; unknown indices yield
        # None, mirroring the generated switch that fell through without a
        # return statement.
        return {0: "%", 1: "l", 2: "^", 3: "m"}.get(namespace.index)

    def valueForNamespacedProperty(self,propertyName,namespace):
        """Fetch a value previously stored via setPropertyForNamespace."""
        namespacedKey = (HxOverrides.stringOrNull(self.sigilForNamespace(namespace)) + ("null" if propertyName is None else propertyName))
        return super().valueForProperty(namespacedKey)

    def addJSONProperties(self,jsonProperties):
        """Copy all properties except ApptimizeLocal ones into *jsonProperties*."""
        localSigil = self.sigilForNamespace(apptimize_support_properties_CustomPropertyNamespace.ApptimizeLocal)
        it = self.availableProperties.keys()
        while it.hasNext():
            k = it.next()
            firstChar = ("" if (0 >= len(k)) else k[0])
            if (firstChar != localSigil):
                jsonProperties.h[k] = self.availableProperties.h.get(k,None)

    @staticmethod
    def _hx_empty_init(_hx_o): pass
apptimize_support_properties_ABTCustomProperties._hx_class = apptimize_support_properties_ABTCustomProperties
_hx_classes["apptimize.support.properties.ABTCustomProperties"] = apptimize_support_properties_ABTCustomProperties
class apptimize_support_properties_ABTInternalProperties(apptimize_support_properties_ABTCustomProperties):
    """Singleton store for Apptimize-internal properties.  Ensures the
    backing map exists before every access and wraps mutations with
    load/save hooks (persistence is currently a no-op)."""
    _hx_class_name = "apptimize.support.properties.ABTInternalProperties"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["isPropertyAvailable", "valueForProperty", "setProperty", "_loadProperties", "_saveProperties", "setPropertyForNamespace", "valueForNamespacedProperty"]
    _hx_statics = ["_instance", "sharedInstance"]
    _hx_interfaces = []
    _hx_super = apptimize_support_properties_ABTCustomProperties

    def __init__(self):
        super().__init__()

    def isPropertyAvailable(self,propertyName):
        """Ensure the backing map exists, then defer to the parent check."""
        self._loadProperties()
        return super().isPropertyAvailable(propertyName)

    def valueForProperty(self,propertyName):
        """Look the property up directly in the backing map."""
        self._loadProperties()
        props = self.availableProperties
        return props.h.get(propertyName,None)

    def setProperty(self,key,value):
        """Internal properties always live in the ApptimizeInternal namespace."""
        self.setPropertyForNamespace(key,value,apptimize_support_properties_CustomPropertyNamespace.ApptimizeInternal)

    def _loadProperties(self):
        # Placeholder for restoring persisted properties; today it only
        # guarantees the map exists.
        if (self.availableProperties is None):
            self.availableProperties = haxe_ds_StringMap()

    def _saveProperties(self):
        # Persistence hook; intentionally a no-op for now.
        pass

    def setPropertyForNamespace(self,key,value,namespace):
        """Load, delegate the namespaced write, then trigger the save hook."""
        self._loadProperties()
        super().setPropertyForNamespace(key,value,namespace)
        self._saveProperties()

    def valueForNamespacedProperty(self,propertyName,namespace):
        """Load, then delegate the namespaced read to the parent."""
        self._loadProperties()
        return super().valueForNamespacedProperty(propertyName,namespace)
    _instance = None

    @staticmethod
    def sharedInstance():
        """Return the process-wide singleton, creating it on first use."""
        if (apptimize_support_properties_ABTInternalProperties._instance is None):
            apptimize_support_properties_ABTInternalProperties._instance = apptimize_support_properties_ABTInternalProperties()
        return apptimize_support_properties_ABTInternalProperties._instance

    @staticmethod
    def _hx_empty_init(_hx_o): pass
apptimize_support_properties_ABTInternalProperties._hx_class = apptimize_support_properties_ABTInternalProperties
_hx_classes["apptimize.support.properties.ABTInternalProperties"] = apptimize_support_properties_ABTInternalProperties
class apptimize_util_DefaultPlatformLock:
    """Fallback lock for platforms without real threading primitives:
    acquire always succeeds immediately and release does nothing."""
    _hx_class_name = "apptimize.util.DefaultPlatformLock"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_methods = ["acquire", "release", "hxUnserialize"]
    _hx_interfaces = [apptimize_util_PlatformLock]

    def __init__(self):
        pass

    def acquire(self):
        """Always succeeds; there is nothing to contend on."""
        return True

    def release(self):
        """No-op counterpart to acquire()."""
        return

    def hxUnserialize(self,u):
        # Nothing to restore; the lock is stateless.
        pass

    @staticmethod
    def _hx_empty_init(_hx_o): pass
apptimize_util_DefaultPlatformLock._hx_class = apptimize_util_DefaultPlatformLock
_hx_classes["apptimize.util.DefaultPlatformLock"] = apptimize_util_DefaultPlatformLock
class apptimize_util_ABTDispatchTask:
    """A queued unit of work plus the absolute wall-clock time (epoch
    milliseconds) at which it becomes eligible to run."""
    _hx_class_name = "apptimize.util.ABTDispatchTask"
    _hx_is_interface = "False"
    __slots__ = ("task", "startTimestampMs")
    _hx_fields = ["task", "startTimestampMs"]

    def __init__(self,task,delay):
        self.task = task
        # Eligibility time = "now" in epoch ms plus the requested delay (ms).
        nowMs = (Date.now().date.timestamp() * 1000)
        self.startTimestampMs = (nowMs + delay)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.task = None
        _hx_o.startTimestampMs = None
apptimize_util_ABTDispatchTask._hx_class = apptimize_util_ABTDispatchTask
_hx_classes["apptimize.util.ABTDispatchTask"] = apptimize_util_ABTDispatchTask
class apptimize_util_ABTException:
    """Raises SDK errors only when the EXCEPTIONS_ENABLED configuration flag
    is on; otherwise the error is silently suppressed."""
    _hx_class_name = "apptimize.util.ABTException"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["throwException"]

    @staticmethod
    def throwException(message):
        """Raise *message* as a Haxe exception if exceptions are enabled."""
        config = apptimize_support_properties_ABTConfigProperties.sharedInstance()
        if config.valueForProperty(apptimize_support_properties_ABTConfigProperties.EXCEPTIONS_ENABLED_KEY):
            raise haxe_Exception.thrown(message)
apptimize_util_ABTException._hx_class = apptimize_util_ABTException
_hx_classes["apptimize.util.ABTException"] = apptimize_util_ABTException
class apptimize_util_ABTHash:
    """Thin wrapper over the Haxe SHA-1 implementation."""
    _hx_class_name = "apptimize.util.ABTHash"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["Sha1"]

    @staticmethod
    def Sha1(obj):
        """Return the SHA-1 digest of *obj* as produced by haxe.crypto.Sha1."""
        digest = haxe_crypto_Sha1.make(obj)
        return digest
apptimize_util_ABTHash._hx_class = apptimize_util_ABTHash
_hx_classes["apptimize.util.ABTHash"] = apptimize_util_ABTHash
class apptimize_util_ABTInt64Utils:
    """Helpers for Haxe Int64 values: JSON-preprocessing sentinels and
    (de)serialization as a high/low word pair."""
    _hx_class_name = "apptimize.util.ABTInt64Utils"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["toPreprocessedString", "_serializeInt64", "_deserializeInt64"]

    @staticmethod
    def toPreprocessedString(number):
        """Wrap the Int64's decimal text in wideInt_/_wideInt sentinels
        (later stripped again by ABTJSONUtils.stringify)."""
        text = HxOverrides.stringOrNull(haxe__Int64_Int64_Impl_.toString(number))
        return (("wideInt_" + text) + "_wideInt")

    @staticmethod
    def _serializeInt64(value,s):
        """Write the 64-bit value as its high word followed by its low word."""
        s.serialize(value.high)
        s.serialize(value.low)

    @staticmethod
    def _deserializeInt64(u):
        """Rebuild an Int64 from the high/low words written by _serializeInt64."""
        high = u.unserialize()
        low = u.unserialize()
        return haxe__Int64____Int64(high,low)
apptimize_util_ABTInt64Utils._hx_class = apptimize_util_ABTInt64Utils
_hx_classes["apptimize.util.ABTInt64Utils"] = apptimize_util_ABTInt64Utils
class apptimize_util_ABTJSONUtils:
    """JSON serialization helper aware of the wideInt_/_wideInt sentinels
    emitted by ABTInt64Utils.toPreprocessedString."""
    _hx_class_name = "apptimize.util.ABTJSONUtils"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["stringify"]

    @staticmethod
    def stringify(json):
        """Serialize *json*, then strip the sentinels (and their adjoining
        quotes) so 64-bit integers appear as bare numbers in the output."""
        text = haxe_format_JsonPrinter.print(json,None,None)
        for marker in ["\"wideInt_", "_wideInt\""]:
            text = StringTools.replace(text,marker,"")
        return text
apptimize_util_ABTJSONUtils._hx_class = apptimize_util_ABTJSONUtils
_hx_classes["apptimize.util.ABTJSONUtils"] = apptimize_util_ABTJSONUtils
class apptimize_util_ABTLRUCache:
    # Haxe-generated string-keyed LRU cache (apptimize.util.ABTLRUCache).
    # `_list` keeps keys ordered least-recently-used first (most recent at the
    # end); `_map` holds the values. Every operation holds `cacheLock`; the
    # generated try/except blocks release the lock before re-raising.
    # NOTE(review): hasKey() re-acquires cacheLock from getValue()/remove()/
    # insert(), so the lock is presumably reentrant — getNewLock() is not
    # visible here; confirm.
    _hx_class_name = "apptimize.util.ABTLRUCache"
    _hx_is_interface = "False"
    __slots__ = ("_cacheSize", "_list", "_map", "cacheLock")
    _hx_fields = ["_cacheSize", "_list", "_map", "cacheLock"]
    _hx_methods = ["clear", "hasKey", "getValue", "remove", "insert", "hxSerialize", "hxUnserialize", "didUnserialize", "initMissingFields"]
    def __init__(self,cacheSize):
        # cacheSize: maximum number of entries retained before LRU eviction.
        self._map = None
        self._list = None
        self.cacheLock = apptimize_util_ABTDataLock.getNewLock("ABTLRUCache_lock")
        self._cacheSize = cacheSize
        self.clear()
    def clear(self,callback = None,dispatchQueue = None):
        # Empty the cache. If `callback` is given, it is invoked once per evicted
        # value — on `dispatchQueue` when provided, otherwise immediately.
        self.cacheLock.acquire()
        try:
            if (callback is not None):
                _g = 0
                _g1 = self._list
                while (_g < len(_g1)):
                    id = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                    _g = (_g + 1)
                    # One-element list so each closure captures this iteration's value.
                    value = [self._map.h.get(id,None)]
                    def _hx_local_2(value):
                        def _hx_local_1():
                            callback((value[0] if 0 < len(value) else None))
                        return _hx_local_1
                    task = _hx_local_2(value)
                    if (dispatchQueue is not None):
                        dispatchQueue.dispatch(task,0)
                    else:
                        apptimize_util_ABTDispatch.dispatchImmediately(task)
            self._list = list()
            self._map = haxe_ds_StringMap()
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            # Always release before re-raising so the lock is never leaked.
            self.cacheLock.release()
            raise haxe_Exception.thrown(e)
        self.cacheLock.release()
    def hasKey(self,key):
        # True when `key` maps to a non-None value.
        result = False
        self.cacheLock.acquire()
        try:
            result = (self._map.h.get(key,None) is not None)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            self.cacheLock.release()
            raise haxe_Exception.thrown(e)
        self.cacheLock.release()
        return result
    def getValue(self,key):
        # Value for `key`, or None when absent. Does NOT update recency.
        result = None
        self.cacheLock.acquire()
        try:
            if self.hasKey(key):
                result = self._map.h.get(key,None)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            self.cacheLock.release()
            raise haxe_Exception.thrown(e)
        self.cacheLock.release()
        return result
    def remove(self,key,callback = None,dispatchQueue = None):
        # Remove `key`, optionally reporting the removed value via `callback`.
        self.cacheLock.acquire()
        try:
            if (not self.hasKey(key)):
                # BUG FIX: the original returned here while still holding
                # cacheLock, leaking the acquire on the missing-key path.
                self.cacheLock.release()
                return
            if (callback is not None):
                value = self._map.h.get(key,None)
                def _hx_local_0():
                    callback(value)
                task = _hx_local_0
                if (dispatchQueue is not None):
                    dispatchQueue.dispatch(task,0)
                else:
                    apptimize_util_ABTDispatch.dispatchImmediately(task)
            python_internal_ArrayImpl.remove(self._list,key)
            self._map.remove(key)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            self.cacheLock.release()
            raise haxe_Exception.thrown(e)
        self.cacheLock.release()
    def insert(self,key,value,callback = None,dispatchQueue = None):
        # Insert/overwrite `key` and mark it most recently used. When the cache
        # exceeds _cacheSize, the least-recently-used entry is evicted and
        # optionally reported via `callback`.
        self.cacheLock.acquire()
        try:
            if self.hasKey(key):
                self._map.h[key] = value
                python_internal_ArrayImpl.remove(self._list,key)
                _this = self._list
                _this.append(key)
            else:
                self._map.h[key] = value
                _this = self._list
                _this.append(key)
                if (len(self._list) > self._cacheSize):
                    _this = self._list
                    # Oldest key sits at the front of the list.
                    id = (None if ((len(_this) == 0)) else _this.pop(0))
                    if (callback is not None):
                        # `value` is rebound to the evicted value here (codegen quirk).
                        value = self._map.h.get(id,None)
                        def _hx_local_0():
                            callback(value)
                        task = _hx_local_0
                        if (dispatchQueue is not None):
                            dispatchQueue.dispatch(task,0)
                        else:
                            apptimize_util_ABTDispatch.dispatchImmediately(task)
                    self._map.remove(id)
        except BaseException as _g:
            None
            e = haxe_Exception.caught(_g).unwrap()
            self.cacheLock.release()
            raise haxe_Exception.thrown(e)
        self.cacheLock.release()
    def hxSerialize(self,s):
        # Custom Haxe serialization: persist size, key order and values, but
        # not the (unserializable) lock.
        _g = haxe_ds_StringMap()
        _g.h["_cacheSize"] = self._cacheSize
        _g.h["_list"] = self._list
        _g.h["_map"] = self._map
        values = _g
        s.serialize(values)
    def hxUnserialize(self,u):
        # Restore the fields written by hxSerialize, then recreate the lock.
        deserialized = u.unserialize()
        self._cacheSize = deserialized.h.get("_cacheSize",None)
        self._list = deserialized.h.get("_list",None)
        self._map = deserialized.h.get("_map",None)
        self.initMissingFields()
    def didUnserialize(self):
        self.initMissingFields()
    def initMissingFields(self):
        # The lock is never serialized, so rebuild it after unserialization.
        if (self.cacheLock is None):
            self.cacheLock = apptimize_util_ABTDataLock.getNewLock("ABTLRUCache_lock")
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._cacheSize = None
        _hx_o._list = None
        _hx_o._map = None
        _hx_o.cacheLock = None
apptimize_util_ABTLRUCache._hx_class = apptimize_util_ABTLRUCache
_hx_classes["apptimize.util.ABTLRUCache"] = apptimize_util_ABTLRUCache
class apptimize_util_ABTTimer:
    # Haxe-generated repeating timer (apptimize.util.ABTTimer). Fires `self.run`
    # every `time_ms` milliseconds on a dedicated daemon thread until stop()
    # is called.
    _hx_class_name = "apptimize.util.ABTTimer"
    _hx_is_interface = "False"
    _hx_fields = ["thread", "interval", "startTime", "event"]
    _hx_methods = ["stop", "run"]
    def __init__(self,time_ms):
        # time_ms: tick period in milliseconds.
        self.startTime = None
        self.interval = None
        self.thread = None
        self.event = apptimize_native_python_Event()
        _gthis = self
        def _hx_local_2():
            # Worker loop: sleep until the next scheduled tick, fire run(),
            # repeat until stop() sets the event.
            while (not _gthis.event.is_set()):
                try:
                    _gthis1 = _gthis
                    def _hx_local_1():
                        # Advance the schedule by one interval (fixed-rate
                        # scheduling: tick times don't drift with run() cost).
                        _gthis1.startTime = (_gthis1.startTime + _gthis.interval)
                        return _gthis1.startTime
                    next = ((_hx_local_1()) - python_lib_Time.time())
                    # wait() returns True when the event was set (stop requested),
                    # False on timeout — i.e. when it's time to tick.
                    if (not _gthis.event.wait(next)):
                        localRun = _gthis.run
                        if (localRun is not None):
                            localRun()
                except BaseException as _g:
                    e = haxe_Exception.caught(_g).unwrap()
                    apptimize_ABTLogger.e(("Exception in ABTTimer: " + Std.string(e)),_hx_AnonObject({'fileName': "src/apptimize/util/ABTTimer.hx", 'lineNumber': 28, 'className': "apptimize.util.ABTTimer", 'methodName': "new"}))
        worker = _hx_local_2
        # interval and startTime are both assigned before the thread starts, so
        # the worker closure only ever sees initialized values.
        self.interval = (time_ms / 1000.0)
        self.thread = python_lib_threading_Thread(**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'target': worker})))
        self.thread.daemon = True
        _hx_local_3 = self.thread
        _hx_local_4 = _hx_local_3.name
        # Tag the thread name for debuggability; the trailing bare expression is
        # a codegen artifact of Haxe's assignment-as-expression.
        _hx_local_3.name = (("null" if _hx_local_4 is None else _hx_local_4) + " ~ Apptimize Timer Thread")
        _hx_local_3.name
        self.startTime = python_lib_Time.time()
        self.thread.start()
    def stop(self):
        # Drop the callback and wake the worker so its loop exits promptly.
        self.run = None
        self.event.set()
    def run(self):
        # Tick handler; a no-op by default. The worker reads it per tick, so it
        # can be replaced on an instance (stop() sets it to None).
        pass
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.thread = None
        _hx_o.interval = None
        _hx_o.startTime = None
        _hx_o.event = None
apptimize_util_ABTTimer._hx_class = apptimize_util_ABTTimer
_hx_classes["apptimize.util.ABTTimer"] = apptimize_util_ABTTimer
class apptimize_util_ABTTypes:
    # Haxe-generated runtime type checks (apptimize.util.ABTTypes).
    _hx_class_name = "apptimize.util.ABTTypes"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["isString"]
    @staticmethod
    def isString(string):
        # True when the Haxe runtime type of `string` is the String class.
        # Enum index 6 is ValueType.TClass, whose single param is the class;
        # short-circuiting preserves the original evaluation order (the class
        # name is only inspected for TClass values).
        t = Type.typeof(string)
        return (t.index == 6) and (Type.getClassName(t.params[0]) == "String")
apptimize_util_ABTTypes._hx_class = apptimize_util_ABTTypes
_hx_classes["apptimize.util.ABTTypes"] = apptimize_util_ABTTypes
class apptimize_util_ArrayType(Enum):
    # Haxe-generated enum of array element types (apptimize.util.ArrayType).
    # `Enum` here is the Haxe runtime enum base (constructed with
    # (tag, index, params)), not Python's enum.Enum.
    __slots__ = ()
    _hx_class_name = "apptimize.util.ArrayType"
    _hx_constructs = ["Int", "Bool", "Double", "String", "VariantInfo"]
# Parameterless constructors are pre-built singleton instances.
apptimize_util_ArrayType.Int = apptimize_util_ArrayType("Int", 0, ())
apptimize_util_ArrayType.Bool = apptimize_util_ArrayType("Bool", 1, ())
apptimize_util_ArrayType.Double = apptimize_util_ArrayType("Double", 2, ())
apptimize_util_ArrayType.String = apptimize_util_ArrayType("String", 3, ())
apptimize_util_ArrayType.VariantInfo = apptimize_util_ArrayType("VariantInfo", 4, ())
apptimize_util_ArrayType._hx_class = apptimize_util_ArrayType
_hx_classes["apptimize.util.ArrayType"] = apptimize_util_ArrayType
class apptimize_util_ABTUtilArray:
    # Haxe-generated array conversion helper (apptimize.util.ABTUtilArray).
    _hx_class_name = "apptimize.util.ABTUtilArray"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["toNativeArray"]
    @staticmethod
    def toNativeArray(haxeArray,_hx_type):
        # Identity on this target — the array is returned unchanged; `_hx_type`
        # (an ArrayType) is unused here, presumably kept for cross-target API
        # parity.
        return haxeArray
apptimize_util_ABTUtilArray._hx_class = apptimize_util_ABTUtilArray
_hx_classes["apptimize.util.ABTUtilArray"] = apptimize_util_ABTUtilArray
class apptimize_util_ABTUtilDictionary:
    # Haxe-generated conversions between Haxe maps / anonymous objects and
    # native Python dicts (apptimize.util.ABTUtilDictionary).
    _hx_class_name = "apptimize.util.ABTUtilDictionary"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["dynamicToNativeDictionary", "stringMapToNativeDictionary", "nativeObjectToStringMap", "nativeDictionaryToStringMap", "dynamicObjectToStringMap", "filterNullValues"]
    @staticmethod
    def dynamicToNativeDictionary(dynamicMap):
        # Copy each reflected field of a Haxe anonymous object into a native
        # dict; None input yields an empty dict.
        pythonDict = dict()
        _hx_dict = dynamicMap
        if (_hx_dict is not None):
            _g = 0
            _g1 = python_Boot.fields(_hx_dict)
            while (_g < len(_g1)):
                key = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                k = key
                pythonDict[k] = Reflect.field(_hx_dict,key)
        return pythonDict
    @staticmethod
    def stringMapToNativeDictionary(stringMap):
        # Copy a haxe_ds_StringMap's entries into a native dict; None input
        # yields an empty dict.
        pythonDict = dict()
        if (stringMap is not None):
            key = stringMap.keys()
            while key.hasNext():
                key1 = key.next()
                k = key1
                pythonDict[k] = stringMap.h.get(key1,None)
        return pythonDict
    @staticmethod
    def nativeObjectToStringMap(nativeMap):
        # Dispatch on the runtime shape: anonymous object -> reflect its fields;
        # already a StringMap -> return as-is; otherwise treat as a native dict.
        if (Type.typeof(nativeMap) == ValueType.TObject):
            return apptimize_util_ABTUtilDictionary.dynamicObjectToStringMap(nativeMap)
        if (Type.getClass(nativeMap) == haxe_ds_StringMap):
            return nativeMap
        return apptimize_util_ABTUtilDictionary.nativeDictionaryToStringMap(nativeMap)
    @staticmethod
    def nativeDictionaryToStringMap(nativeMap):
        # Copy a native dict's entries into a new haxe_ds_StringMap.
        pythonDict = nativeMap
        _hx_map = haxe_ds_StringMap()
        if (pythonDict is not None):
            key = python_HaxeIterator(iter(pythonDict.keys()))
            while key.hasNext():
                key1 = key.next()
                value = pythonDict.get(key1)
                _hx_map.h[key1] = value
        return _hx_map
    @staticmethod
    def dynamicObjectToStringMap(object):
        # Reflect over an anonymous object's fields into a new StringMap.
        _hx_map = haxe_ds_StringMap()
        fields = python_Boot.fields(object)
        _g = 0
        while (_g < len(fields)):
            field = (fields[_g] if _g >= 0 and _g < len(fields) else None)
            _g = (_g + 1)
            value = Reflect.getProperty(object,field)
            _hx_map.h[field] = value
        return _hx_map
    @staticmethod
    def filterNullValues(_hx_map):
        # New StringMap containing only the entries whose value is not None.
        result = haxe_ds_StringMap()
        key = _hx_map.keys()
        while key.hasNext():
            key1 = key.next()
            value = _hx_map.h.get(key1,None)
            if (value is not None):
                result.h[key1] = value
        return result
apptimize_util_ABTUtilDictionary._hx_class = apptimize_util_ABTUtilDictionary
_hx_classes["apptimize.util.ABTUtilDictionary"] = apptimize_util_ABTUtilDictionary
class apptimize_util_ABTUtilGzip:
    # Haxe-generated decompression helpers (apptimize.util.ABTUtilGzip).
    # Payload layout handled by decompress(): a 2-byte big-endian signature
    # length, the JSON signature itself, then the compressed body.
    _hx_class_name = "apptimize.util.ABTUtilGzip"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["jsonSignatureLength", "decompressBytes", "decompress"]
    @staticmethod
    def jsonSignatureLength(_hx_bytes):
        # Length of the JSON signature: the first two bytes, big-endian.
        b = haxe_io_Bytes.ofData(_hx_bytes)
        _hx_len = ((b.b[0] << 8) | b.b[1])
        return _hx_len
    @staticmethod
    def decompressBytes(_hx_bytes):
        # Inflate `_hx_bytes`; returns the decompressed bytes, or None (after
        # logging) when the inflater reports an error.
        arrayBuffer = haxe_io__UInt8Array_UInt8Array_Impl_.fromBytes(_hx_bytes)
        inflator = pako_Inflate()
        inflator.push(arrayBuffer,True)
        if (inflator.err != 0):
            # BUG FIX: the original logged the literal text
            # "${inflator.err}): ${inflator.msg}" — Haxe only interpolates
            # single-quoted strings, so the real error code/message never
            # reached the log. Build the message from the actual fields.
            apptimize_ABTLogger.e((("Error decompressing data. (" + Std.string(inflator.err)) + ("): " + Std.string(inflator.msg))),_hx_AnonObject({'fileName': "src/apptimize/util/ABTUtilGzip.hx", 'lineNumber': 58, 'className': "apptimize.util.ABTUtilGzip", 'methodName': "decompressBytes"}))
            return None
        return inflator.result.bytes
    @staticmethod
    def decompress(_hx_bytes):
        # Skip the 2-byte length header plus the signature, then inflate the rest.
        bds = haxe_io_Bytes.ofData(_hx_bytes)
        _hx_len = apptimize_util_ABTUtilGzip.jsonSignatureLength(_hx_bytes)
        dataLength = bds.length
        sigLength = (_hx_len + 2)
        zippedLength = (dataLength - sigLength)
        orig = haxe_io_Bytes.ofData(_hx_bytes)
        bd = orig.sub(sigLength,zippedLength)
        return apptimize_util_ABTUtilGzip.decompressBytes(bd)
apptimize_util_ABTUtilGzip._hx_class = apptimize_util_ABTUtilGzip
_hx_classes["apptimize.util.ABTUtilGzip"] = apptimize_util_ABTUtilGzip
class haxe_StackItem(Enum):
    # Haxe-generated enum describing one call-stack entry (haxe.StackItem).
    # `Enum` is the Haxe runtime enum base constructed with (tag, index,
    # params). CFunction is a parameterless singleton; the static methods are
    # the parameterized constructors.
    __slots__ = ()
    _hx_class_name = "haxe.StackItem"
    _hx_constructs = ["CFunction", "Module", "FilePos", "Method", "LocalFunction"]
    @staticmethod
    def Module(m):
        return haxe_StackItem("Module", 1, (m,))
    @staticmethod
    def FilePos(s,file,line,column = None):
        # s: an optional nested StackItem describing what ran at this position.
        return haxe_StackItem("FilePos", 2, (s,file,line,column))
    @staticmethod
    def Method(classname,method):
        return haxe_StackItem("Method", 3, (classname,method))
    @staticmethod
    def LocalFunction(v = None):
        return haxe_StackItem("LocalFunction", 4, (v,))
haxe_StackItem.CFunction = haxe_StackItem("CFunction", 0, ())
haxe_StackItem._hx_class = haxe_StackItem
_hx_classes["haxe.StackItem"] = haxe_StackItem
class haxe__CallStack_CallStack_Impl_:
    # Haxe-generated call-stack utilities over lists of haxe_StackItem
    # (haxe._CallStack.CallStack_Impl_). Stacks are ordered most recent first.
    _hx_class_name = "haxe._CallStack.CallStack_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["callStack", "exceptionStack", "toString", "subtract", "equalItems", "itemToString"]
    @staticmethod
    def callStack():
        # Current call stack, with this helper's own frame popped off first.
        infos = python_lib_Traceback.extract_stack()
        if (len(infos) != 0):
            infos.pop()
        infos.reverse()
        return haxe_NativeStackTrace.toHaxe(infos)
    @staticmethod
    def exceptionStack(fullStack = None):
        # Stack of the most recent exception. Unless fullStack is True, the
        # portion shared with the current call stack is subtracted away.
        if (fullStack is None):
            fullStack = False
        eStack = haxe_NativeStackTrace.toHaxe(haxe_NativeStackTrace.exceptionStack())
        return (eStack if fullStack else haxe__CallStack_CallStack_Impl_.subtract(eStack,haxe__CallStack_CallStack_Impl_.callStack()))
    @staticmethod
    def toString(stack):
        # Render the stack one "\nCalled from ..." line per item.
        b = StringBuf()
        _g = 0
        _g1 = stack
        while (_g < len(_g1)):
            s = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            b.b.write("\nCalled from ")
            haxe__CallStack_CallStack_Impl_.itemToString(b,s)
        return b.b.getvalue()
    @staticmethod
    def subtract(this1,stack):
        # Return the prefix of this1 that precedes the first run of items also
        # present in `stack` (strips the common frames shared by both stacks).
        startIndex = -1
        i = -1
        while True:
            i = (i + 1)
            tmp = i
            if (not ((tmp < len(this1)))):
                break
            _g = 0
            _g1 = len(stack)
            while (_g < _g1):
                j = _g
                _g = (_g + 1)
                if haxe__CallStack_CallStack_Impl_.equalItems((this1[i] if i >= 0 and i < len(this1) else None),python_internal_ArrayImpl._get(stack, j)):
                    if (startIndex < 0):
                        startIndex = i
                    i = (i + 1)
                    if (i >= len(this1)):
                        break
                else:
                    startIndex = -1
            if (startIndex >= 0):
                break
        if (startIndex >= 0):
            return this1[0:startIndex]
        else:
            return this1
    @staticmethod
    def equalItems(item1,item2):
        # Structural equality of two StackItems; `index` identifies the enum
        # constructor (0 CFunction, 1 Module, 2 FilePos, 3 Method,
        # 4 LocalFunction). FilePos compares position first, then recurses on
        # the nested item.
        if (item1 is None):
            if (item2 is None):
                return True
            else:
                return False
        else:
            tmp = item1.index
            if (tmp == 0):
                if (item2 is None):
                    return False
                elif (item2.index == 0):
                    return True
                else:
                    return False
            elif (tmp == 1):
                if (item2 is None):
                    return False
                elif (item2.index == 1):
                    m2 = item2.params[0]
                    m1 = item1.params[0]
                    return (m1 == m2)
                else:
                    return False
            elif (tmp == 2):
                if (item2 is None):
                    return False
                elif (item2.index == 2):
                    item21 = item2.params[0]
                    file2 = item2.params[1]
                    line2 = item2.params[2]
                    col2 = item2.params[3]
                    col1 = item1.params[3]
                    line1 = item1.params[2]
                    file1 = item1.params[1]
                    item11 = item1.params[0]
                    if (((file1 == file2) and ((line1 == line2))) and ((col1 == col2))):
                        return haxe__CallStack_CallStack_Impl_.equalItems(item11,item21)
                    else:
                        return False
                else:
                    return False
            elif (tmp == 3):
                if (item2 is None):
                    return False
                elif (item2.index == 3):
                    class2 = item2.params[0]
                    method2 = item2.params[1]
                    method1 = item1.params[1]
                    class1 = item1.params[0]
                    if (class1 == class2):
                        return (method1 == method2)
                    else:
                        return False
                else:
                    return False
            elif (tmp == 4):
                if (item2 is None):
                    return False
                elif (item2.index == 4):
                    v2 = item2.params[0]
                    v1 = item1.params[0]
                    return (v1 == v2)
                else:
                    return False
            else:
                # Unreachable for well-formed items; falls through returning None.
                pass
    @staticmethod
    def itemToString(b,s):
        # Append a human-readable description of one StackItem to StringBuf `b`.
        tmp = s.index
        if (tmp == 0):
            b.b.write("a C function")
        elif (tmp == 1):
            m = s.params[0]
            b.b.write("module ")
            s1 = Std.string(m)
            b.b.write(s1)
        elif (tmp == 2):
            # FilePos: optional nested item, then "(file line N [column M])".
            s1 = s.params[0]
            file = s.params[1]
            line = s.params[2]
            col = s.params[3]
            if (s1 is not None):
                haxe__CallStack_CallStack_Impl_.itemToString(b,s1)
                b.b.write(" (")
            s2 = Std.string(file)
            b.b.write(s2)
            b.b.write(" line ")
            s2 = Std.string(line)
            b.b.write(s2)
            if (col is not None):
                b.b.write(" column ")
                s2 = Std.string(col)
                b.b.write(s2)
            if (s1 is not None):
                b.b.write(")")
        elif (tmp == 3):
            cname = s.params[0]
            meth = s.params[1]
            s1 = Std.string(("<unknown>" if ((cname is None)) else cname))
            b.b.write(s1)
            b.b.write(".")
            s1 = Std.string(meth)
            b.b.write(s1)
        elif (tmp == 4):
            n = s.params[0]
            b.b.write("local function #")
            s = Std.string(n)
            b.b.write(s)
        else:
            pass
haxe__CallStack_CallStack_Impl_._hx_class = haxe__CallStack_CallStack_Impl_
_hx_classes["haxe._CallStack.CallStack_Impl_"] = haxe__CallStack_CallStack_Impl_
class haxe_Exception(Exception):
    # Haxe-generated base exception (haxe.Exception), bridging Haxe throws to
    # native Python exceptions. Holds the wrapped native exception (possibly
    # itself), the previous exception in a chain, and the stack captured at
    # construction time.
    _hx_class_name = "haxe.Exception"
    _hx_is_interface = "False"
    __slots__ = ("_hx___nativeStack", "_hx___skipStack", "_hx___nativeException", "_hx___previousException")
    _hx_fields = ["__nativeStack", "__skipStack", "__nativeException", "__previousException"]
    _hx_methods = ["unwrap", "toString", "get_message", "get_native"]
    _hx_statics = ["caught", "thrown"]
    _hx_interfaces = []
    _hx_super = Exception
    def __init__(self,message,previous = None,native = None):
        self._hx___previousException = None
        self._hx___nativeException = None
        self._hx___nativeStack = None
        self._hx___skipStack = 0
        super().__init__(message)
        self._hx___previousException = previous
        if ((native is not None) and Std.isOfType(native,BaseException)):
            # Wrapping a real native exception: keep it plus its exception stack.
            self._hx___nativeException = native
            self._hx___nativeStack = haxe_NativeStackTrace.exceptionStack()
        else:
            # No native exception given: this instance is its own native form;
            # record the current call stack minus this __init__ frame.
            self._hx___nativeException = self
            infos = python_lib_Traceback.extract_stack()
            if (len(infos) != 0):
                infos.pop()
            infos.reverse()
            self._hx___nativeStack = infos
    def unwrap(self):
        # The underlying native exception (may be self).
        return self._hx___nativeException
    def toString(self):
        return self.get_message()
    def get_message(self):
        return str(self)
    def get_native(self):
        return self._hx___nativeException
    @staticmethod
    def caught(value):
        # Normalize any raised value into a haxe_Exception for Haxe catch blocks;
        # non-exception values are boxed in haxe_ValueException.
        if Std.isOfType(value,haxe_Exception):
            return value
        elif Std.isOfType(value,BaseException):
            return haxe_Exception(str(value),None,value)
        else:
            return haxe_ValueException(value,None,value)
    @staticmethod
    def thrown(value):
        # Convert a Haxe-thrown value into something natively raisable.
        if Std.isOfType(value,haxe_Exception):
            return value.get_native()
        elif Std.isOfType(value,BaseException):
            return value
        else:
            e = haxe_ValueException(value)
            # Skip this helper frame when the stack is later rendered.
            e._hx___skipStack = (e._hx___skipStack + 1)
            return e
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._hx___nativeStack = None
        _hx_o._hx___skipStack = None
        _hx_o._hx___nativeException = None
        _hx_o._hx___previousException = None
haxe_Exception._hx_class = haxe_Exception
_hx_classes["haxe.Exception"] = haxe_Exception
class haxe__Int32_Int32_Impl_:
    # Haxe-generated 32-bit integer arithmetic (haxe._Int32.Int32_Impl_).
    # Every `((x + 2**31) % 2**32) - 2**31` step wraps an unbounded Python int
    # back into signed 32-bit range.
    _hx_class_name = "haxe._Int32.Int32_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["mul", "ucompare"]
    @staticmethod
    def mul(a,b):
        # 32-bit wrapping multiply: low-16 partial product plus the wrapped,
        # shifted high-16 partial product, wrapped once more at the end.
        low_product = a * (b & 65535)
        high_product = (a * HxOverrides.rshift(b, 16)) << 16
        high_wrapped = ((high_product + (2 ** 31)) % (2 ** 32)) - (2 ** 31)
        total = low_product + high_wrapped
        return ((total + (2 ** 31)) % (2 ** 32)) - (2 ** 31)
    @staticmethod
    def ucompare(a,b):
        # Compare a and b as unsigned 32-bit values: positive when a > b,
        # negative when a < b, zero when equal. A negative (sign-bit-set)
        # operand is unsigned-larger than any non-negative one.
        if (a < 0) and (b < 0):
            # Both have the sign bit set: compare their complements instead.
            flipped_b = ((~b + (2 ** 31)) % (2 ** 32)) - (2 ** 31)
            flipped_a = ((~a + (2 ** 31)) % (2 ** 32)) - (2 ** 31)
            return (((flipped_b - flipped_a) + (2 ** 31)) % (2 ** 32)) - (2 ** 31)
        if a < 0:
            return 1
        if b < 0:
            return -1
        return (((a - b) + (2 ** 31)) % (2 ** 32)) - (2 ** 31)
# Haxe reflection-metadata registration for haxe._Int32.Int32_Impl_.
haxe__Int32_Int32_Impl_._hx_class = haxe__Int32_Int32_Impl_
_hx_classes["haxe._Int32.Int32_Impl_"] = haxe__Int32_Int32_Impl_
class haxe__Int64_Int64_Impl_:
    # Haxe-generated 64-bit integer arithmetic emulated with two signed 32-bit
    # halves (haxe._Int64.Int64_Impl_). Recurring patterns: the expression
    # `(x + 2**31) % 2**32 - 2**31` wraps a value back into signed 32-bit
    # range; negation is two's complement across both halves (~high, ~low+1,
    # with a carry into high when the negated low is 0). Unused temporaries
    # such as `ret`/`ret1` are codegen artifacts.
    _hx_class_name = "haxe._Int64.Int64_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["toString", "divMod"]
    @staticmethod
    def toString(this1):
        # Decimal rendering: repeatedly divMod by 10, prepending each remainder
        # digit; a leading "-" is added for negative inputs.
        i = this1
        b_high = 0
        b_low = 0
        if ((i.high == b_high) and ((i.low == b_low))):
            return "0"
        _hx_str = ""
        neg = False
        if (i.high < 0):
            neg = True
        this1 = haxe__Int64____Int64(0,10)
        ten = this1
        while True:
            b_high = 0
            b_low = 0
            if (not (((i.high != b_high) or ((i.low != b_low))))):
                break
            r = haxe__Int64_Int64_Impl_.divMod(i,ten)
            if (r.modulus.high < 0):
                # Negative remainder: negate modulus (for the digit) and the
                # quotient (to keep iterating on a positive magnitude).
                x = r.modulus
                high = ((~x.high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                low = (((~x.low + 1) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                if (low == 0):
                    ret = high
                    high = (high + 1)
                    high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                this_high = high
                this_low = low
                _hx_str = (Std.string(this_low) + ("null" if _hx_str is None else _hx_str))
                x1 = r.quotient
                high1 = ((~x1.high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                low1 = (((~x1.low + 1) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                if (low1 == 0):
                    ret1 = high1
                    high1 = (high1 + 1)
                    high1 = ((high1 + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                this1 = haxe__Int64____Int64(high1,low1)
                i = this1
            else:
                _hx_str = (Std.string(r.modulus.low) + ("null" if _hx_str is None else _hx_str))
                i = r.quotient
        if neg:
            _hx_str = ("-" + ("null" if _hx_str is None else _hx_str))
        return _hx_str
    @staticmethod
    def divMod(dividend,divisor):
        # Signed 64-bit division via binary long division on magnitudes.
        # Returns {quotient, modulus}; raises on division by zero.
        if (divisor.high == 0):
            _g = divisor.low
            if (_g == 0):
                raise haxe_Exception.thrown("divide by zero")
            elif (_g == 1):
                # Fast path: dividing by one.
                this1 = haxe__Int64____Int64(dividend.high,dividend.low)
                this2 = haxe__Int64____Int64(0,0)
                return _hx_AnonObject({'quotient': this1, 'modulus': this2})
            else:
                pass
        # Result sign, then work on absolute values.
        divSign = ((dividend.high < 0) != ((divisor.high < 0)))
        modulus = None
        if (dividend.high < 0):
            high = ((~dividend.high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            low = (((~dividend.low + 1) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            if (low == 0):
                ret = high
                high = (high + 1)
                high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            this1 = haxe__Int64____Int64(high,low)
            modulus = this1
        else:
            this1 = haxe__Int64____Int64(dividend.high,dividend.low)
            modulus = this1
        if (divisor.high < 0):
            high = ((~divisor.high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            low = (((~divisor.low + 1) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            if (low == 0):
                ret = high
                high = (high + 1)
                high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            this1 = haxe__Int64____Int64(high,low)
            divisor = this1
        this1 = haxe__Int64____Int64(0,0)
        quotient = this1
        this1 = haxe__Int64____Int64(0,1)
        mask = this1
        # Alignment phase: shift divisor and mask left one bit per pass until
        # the divisor's (unsigned) magnitude reaches the dividend's or its
        # sign bit is hit.
        while (not ((divisor.high < 0))):
            v = haxe__Int32_Int32_Impl_.ucompare(divisor.high,modulus.high)
            cmp = (v if ((v != 0)) else haxe__Int32_Int32_Impl_.ucompare(divisor.low,modulus.low))
            b = 1
            b = (b & 63)
            if (b == 0):
                this1 = haxe__Int64____Int64(divisor.high,divisor.low)
                divisor = this1
            elif (b < 32):
                this2 = haxe__Int64____Int64(((((((((divisor.high << b)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)) | HxOverrides.rshift(divisor.low, ((32 - b))))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)),((((divisor.low << b)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)))
                divisor = this2
            else:
                this3 = haxe__Int64____Int64(((((divisor.low << ((b - 32)))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)),0)
                divisor = this3
            b1 = 1
            b1 = (b1 & 63)
            if (b1 == 0):
                this4 = haxe__Int64____Int64(mask.high,mask.low)
                mask = this4
            elif (b1 < 32):
                this5 = haxe__Int64____Int64(((((((((mask.high << b1)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)) | HxOverrides.rshift(mask.low, ((32 - b1))))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)),((((mask.low << b1)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)))
                mask = this5
            else:
                this6 = haxe__Int64____Int64(((((mask.low << ((b1 - 32)))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)),0)
                mask = this6
            if (cmp >= 0):
                break
        # Subtraction phase: walk mask (and divisor) back down one bit per
        # pass, subtracting the shifted divisor wherever it fits and setting
        # the corresponding quotient bit.
        while True:
            b_high = 0
            b_low = 0
            if (not (((mask.high != b_high) or ((mask.low != b_low))))):
                break
            v = haxe__Int32_Int32_Impl_.ucompare(modulus.high,divisor.high)
            if (((v if ((v != 0)) else haxe__Int32_Int32_Impl_.ucompare(modulus.low,divisor.low))) >= 0):
                this1 = haxe__Int64____Int64(((((quotient.high | mask.high)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)),((((quotient.low | mask.low)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)))
                quotient = this1
                high = (((modulus.high - divisor.high) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                low = (((modulus.low - divisor.low) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                if (haxe__Int32_Int32_Impl_.ucompare(modulus.low,divisor.low) < 0):
                    ret = high
                    high = (high - 1)
                    high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                this2 = haxe__Int64____Int64(high,low)
                modulus = this2
            b = 1
            b = (b & 63)
            if (b == 0):
                this3 = haxe__Int64____Int64(mask.high,mask.low)
                mask = this3
            elif (b < 32):
                this4 = haxe__Int64____Int64(HxOverrides.rshift(mask.high, b),((((((((mask.high << ((32 - b)))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)) | HxOverrides.rshift(mask.low, b))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)))
                mask = this4
            else:
                this5 = haxe__Int64____Int64(0,HxOverrides.rshift(mask.high, ((b - 32))))
                mask = this5
            b1 = 1
            b1 = (b1 & 63)
            if (b1 == 0):
                this6 = haxe__Int64____Int64(divisor.high,divisor.low)
                divisor = this6
            elif (b1 < 32):
                this7 = haxe__Int64____Int64(HxOverrides.rshift(divisor.high, b1),((((((((divisor.high << ((32 - b1)))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)) | HxOverrides.rshift(divisor.low, b1))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)))
                divisor = this7
            else:
                this8 = haxe__Int64____Int64(0,HxOverrides.rshift(divisor.high, ((b1 - 32))))
                divisor = this8
        # Sign fix-ups: negate quotient when operand signs differed, and give
        # the modulus the dividend's sign.
        if divSign:
            high = ((~quotient.high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            low = (((~quotient.low + 1) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            if (low == 0):
                ret = high
                high = (high + 1)
                high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            this1 = haxe__Int64____Int64(high,low)
            quotient = this1
        if (dividend.high < 0):
            high = ((~modulus.high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            low = (((~modulus.low + 1) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            if (low == 0):
                ret = high
                high = (high + 1)
                high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            this1 = haxe__Int64____Int64(high,low)
            modulus = this1
        return _hx_AnonObject({'quotient': quotient, 'modulus': modulus})
haxe__Int64_Int64_Impl_._hx_class = haxe__Int64_Int64_Impl_
_hx_classes["haxe._Int64.Int64_Impl_"] = haxe__Int64_Int64_Impl_
class haxe__Int64____Int64:
    """Plain 64-bit integer container used by the Haxe runtime: two signed
    32-bit halves, `high` and `low` (haxe._Int64.___Int64)."""
    _hx_class_name = "haxe._Int64.___Int64"
    _hx_is_interface = "False"
    __slots__ = ("high", "low")
    _hx_fields = ["high", "low"]
    def __init__(self,high,low):
        # Store both halves verbatim; all arithmetic lives in Int64_Impl_.
        self.high, self.low = high, low
    @staticmethod
    def _hx_empty_init(_hx_o):
        # Reflection helper: blank out both halves on a bare instance.
        _hx_o.high = _hx_o.low = None
# Haxe reflection-metadata registration for haxe._Int64.___Int64.
haxe__Int64____Int64._hx_class = haxe__Int64____Int64
_hx_classes["haxe._Int64.___Int64"] = haxe__Int64____Int64
class haxe_Int64Helper:
    # Haxe-generated Int64 construction helpers (haxe.Int64Helper). Values are
    # haxe__Int64____Int64 pairs of signed 32-bit halves; the recurring
    # `(x + 2**31) % 2**32 - 2**31` expression wraps to signed 32-bit range,
    # and the repeated 16-bit "limb" blocks are inlined 64-bit multiplies
    # (low*low, high*low, low*high cross terms with carry propagation).
    _hx_class_name = "haxe.Int64Helper"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["parseString", "fromFloat"]
    @staticmethod
    def parseString(sParam):
        # Parse a (possibly signed) decimal string into an Int64. Digits are
        # consumed least-significant first, accumulating digit*multiplier with
        # multiplier growing by 10 each step; negative inputs accumulate
        # downward. Raises "NumberFormatError" on non-digits and on
        # overflow/underflow (detected via the sign bit flipping).
        base_high = 0
        base_low = 10
        this1 = haxe__Int64____Int64(0,0)
        current = this1
        this1 = haxe__Int64____Int64(0,1)
        multiplier = this1
        sIsNegative = False
        s = StringTools.trim(sParam)
        if ((("" if ((0 >= len(s))) else s[0])) == "-"):
            sIsNegative = True
            s = HxString.substring(s,1,len(s))
        _hx_len = len(s)
        _g = 0
        _g1 = _hx_len
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            # i-th digit from the right; 48 is the code of "0".
            digitInt = (HxString.charCodeAt(s,((_hx_len - 1) - i)) - 48)
            if ((digitInt < 0) or ((digitInt > 9))):
                raise haxe_Exception.thrown("NumberFormatError")
            if (digitInt != 0):
                digit_high = (digitInt >> 31)
                digit_low = digitInt
                if sIsNegative:
                    # current -= multiplier * digit (64-bit limb multiply).
                    mask = 65535
                    al = (multiplier.low & mask)
                    ah = HxOverrides.rshift(multiplier.low, 16)
                    bl = (digit_low & mask)
                    bh = HxOverrides.rshift(digit_low, 16)
                    p00 = haxe__Int32_Int32_Impl_.mul(al,bl)
                    p10 = haxe__Int32_Int32_Impl_.mul(ah,bl)
                    p01 = haxe__Int32_Int32_Impl_.mul(al,bh)
                    p11 = haxe__Int32_Int32_Impl_.mul(ah,bh)
                    low = p00
                    high = ((((((p11 + (HxOverrides.rshift(p01, 16))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)) + (HxOverrides.rshift(p10, 16))) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    p01 = ((((p01 << 16)) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    low = (((low + p01) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    if (haxe__Int32_Int32_Impl_.ucompare(low,p01) < 0):
                        ret = high
                        high = (high + 1)
                        high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    p10 = ((((p10 << 16)) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    low = (((low + p10) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    if (haxe__Int32_Int32_Impl_.ucompare(low,p10) < 0):
                        ret1 = high
                        high = (high + 1)
                        high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    high = (((high + ((((haxe__Int32_Int32_Impl_.mul(multiplier.low,digit_high) + haxe__Int32_Int32_Impl_.mul(multiplier.high,digit_low)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)))) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    b_high = high
                    b_low = low
                    high1 = (((current.high - b_high) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    low1 = (((current.low - b_low) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    if (haxe__Int32_Int32_Impl_.ucompare(current.low,b_low) < 0):
                        ret2 = high1
                        high1 = (high1 - 1)
                        high1 = ((high1 + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    this1 = haxe__Int64____Int64(high1,low1)
                    current = this1
                    # A negative accumulator must keep its sign bit set.
                    if (not ((current.high < 0))):
                        raise haxe_Exception.thrown("NumberFormatError: Underflow")
                else:
                    # current += multiplier * digit (64-bit limb multiply).
                    mask1 = 65535
                    al1 = (multiplier.low & mask1)
                    ah1 = HxOverrides.rshift(multiplier.low, 16)
                    bl1 = (digit_low & mask1)
                    bh1 = HxOverrides.rshift(digit_low, 16)
                    p001 = haxe__Int32_Int32_Impl_.mul(al1,bl1)
                    p101 = haxe__Int32_Int32_Impl_.mul(ah1,bl1)
                    p011 = haxe__Int32_Int32_Impl_.mul(al1,bh1)
                    p111 = haxe__Int32_Int32_Impl_.mul(ah1,bh1)
                    low2 = p001
                    high2 = ((((((p111 + (HxOverrides.rshift(p011, 16))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)) + (HxOverrides.rshift(p101, 16))) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    p011 = ((((p011 << 16)) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    low2 = (((low2 + p011) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    if (haxe__Int32_Int32_Impl_.ucompare(low2,p011) < 0):
                        ret3 = high2
                        high2 = (high2 + 1)
                        high2 = ((high2 + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    p101 = ((((p101 << 16)) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    low2 = (((low2 + p101) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    if (haxe__Int32_Int32_Impl_.ucompare(low2,p101) < 0):
                        ret4 = high2
                        high2 = (high2 + 1)
                        high2 = ((high2 + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    high2 = (((high2 + ((((haxe__Int32_Int32_Impl_.mul(multiplier.low,digit_high) + haxe__Int32_Int32_Impl_.mul(multiplier.high,digit_low)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)))) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    b_high1 = high2
                    b_low1 = low2
                    high3 = (((current.high + b_high1) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    low3 = (((current.low + b_low1) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    if (haxe__Int32_Int32_Impl_.ucompare(low3,current.low) < 0):
                        ret5 = high3
                        high3 = (high3 + 1)
                        high3 = ((high3 + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                    this2 = haxe__Int64____Int64(high3,low3)
                    current = this2
                    # A non-negative accumulator must not gain a sign bit.
                    if (current.high < 0):
                        raise haxe_Exception.thrown("NumberFormatError: Overflow")
            # multiplier *= 10 for the next (more significant) digit.
            mask2 = 65535
            al2 = (multiplier.low & mask2)
            ah2 = HxOverrides.rshift(multiplier.low, 16)
            bl2 = (base_low & mask2)
            bh2 = HxOverrides.rshift(base_low, 16)
            p002 = haxe__Int32_Int32_Impl_.mul(al2,bl2)
            p102 = haxe__Int32_Int32_Impl_.mul(ah2,bl2)
            p012 = haxe__Int32_Int32_Impl_.mul(al2,bh2)
            p112 = haxe__Int32_Int32_Impl_.mul(ah2,bh2)
            low4 = p002
            high4 = ((((((p112 + (HxOverrides.rshift(p012, 16))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)) + (HxOverrides.rshift(p102, 16))) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            p012 = ((((p012 << 16)) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            low4 = (((low4 + p012) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            if (haxe__Int32_Int32_Impl_.ucompare(low4,p012) < 0):
                ret6 = high4
                high4 = (high4 + 1)
                high4 = ((high4 + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            p102 = ((((p102 << 16)) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            low4 = (((low4 + p102) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            if (haxe__Int32_Int32_Impl_.ucompare(low4,p102) < 0):
                ret7 = high4
                high4 = (high4 + 1)
                high4 = ((high4 + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            high4 = (((high4 + ((((haxe__Int32_Int32_Impl_.mul(multiplier.low,base_high) + haxe__Int32_Int32_Impl_.mul(multiplier.high,base_low)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)))) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            this3 = haxe__Int64____Int64(high4,low4)
            multiplier = this3
        return current
    @staticmethod
    def fromFloat(f):
        # Convert a float to Int64, truncating the fractional part. Rejects
        # NaN/Infinity and magnitudes beyond 2**53-1 (the exact-integer range
        # of a double).
        if (python_lib_Math.isnan(f) or (not ((((f != Math.POSITIVE_INFINITY) and ((f != Math.NEGATIVE_INFINITY))) and (not python_lib_Math.isnan(f)))))):
            raise haxe_Exception.thrown("Number is NaN or Infinite")
        noFractions = (f - (HxOverrides.modf(f, 1)))
        if (noFractions > 9007199254740991):
            raise haxe_Exception.thrown("Conversion overflow")
        if (noFractions < -9007199254740991):
            raise haxe_Exception.thrown("Conversion underflow")
        this1 = haxe__Int64____Int64(0,0)
        result = this1
        neg = (noFractions < 0)
        rest = (-noFractions if neg else noFractions)
        i = 0
        # Build the magnitude bit by bit: halve `rest` each pass and OR in
        # (1 << i) whenever the current bit is set.
        while (rest >= 1):
            curr = HxOverrides.modf(rest, 2)
            rest = (rest / 2)
            if (curr >= 1):
                a_high = 0
                a_low = 1
                b = i
                b = (b & 63)
                b1 = None
                if (b == 0):
                    this1 = haxe__Int64____Int64(a_high,a_low)
                    b1 = this1
                elif (b < 32):
                    this2 = haxe__Int64____Int64(((((((((a_high << b)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)) | HxOverrides.rshift(a_low, ((32 - b))))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)),((((a_low << b)) + (2 ** 31)) % (2 ** 32) - (2 ** 31)))
                    b1 = this2
                else:
                    this3 = haxe__Int64____Int64(((((a_low << ((b - 32)))) + (2 ** 31)) % (2 ** 32) - (2 ** 31)),0)
                    b1 = this3
                high = (((result.high + b1.high) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                low = (((result.low + b1.low) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                if (haxe__Int32_Int32_Impl_.ucompare(low,result.low) < 0):
                    ret = high
                    high = (high + 1)
                    high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
                this4 = haxe__Int64____Int64(high,low)
                result = this4
            i = (i + 1)
        # Two's-complement negate for negative inputs.
        if neg:
            high = ((~result.high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            low = (((~result.low + 1) + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            if (low == 0):
                ret = high
                high = (high + 1)
                high = ((high + (2 ** 31)) % (2 ** 32) - (2 ** 31))
            this1 = haxe__Int64____Int64(high,low)
            result = this1
        return result
haxe_Int64Helper._hx_class = haxe_Int64Helper
_hx_classes["haxe.Int64Helper"] = haxe_Int64Helper
class haxe_Log:
    """Haxe-generated implementation of haxe.Log: formatting and printing
    of trace() output."""
    _hx_class_name = "haxe.Log"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["formatOutput", "trace"]

    @staticmethod
    def formatOutput(v, infos):
        """Render a traced value, optionally prefixed with file:line position
        info and followed by any custom trace parameters."""
        text = Std.string(v)
        if infos is None:
            return text
        prefix = (HxOverrides.stringOrNull(infos.fileName) + ":") + Std.string(infos.lineNumber)
        extras = Reflect.field(infos, "customParams")
        if extras is not None:
            for idx in range(len(extras)):
                extra = extras[idx] if (idx >= 0 and idx < len(extras)) else None
                text = ("null" if text is None else text) + (", " + Std.string(extra))
        return (("null" if prefix is None else prefix) + ": ") + ("null" if text is None else text)

    @staticmethod
    def trace(v, infos=None):
        """Print a formatted trace line to standard output."""
        line = Std.string(haxe_Log.formatOutput(v, infos))
        python_Lib.printString(("" + ("null" if line is None else line)) + HxOverrides.stringOrNull(python_Lib.lineEnd))
# Register runtime class metadata in the global Haxe class table.
haxe_Log._hx_class = haxe_Log
_hx_classes["haxe.Log"] = haxe_Log
class haxe_NativeStackTrace:
    """Haxe-generated bridge between Python tracebacks and Haxe stack items."""
    _hx_class_name = "haxe.NativeStackTrace"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["saveStack", "exceptionStack", "toHaxe"]

    @staticmethod
    def saveStack(exception):
        # No-op on the Python target: the interpreter already records the
        # traceback on the exception itself.
        pass

    @staticmethod
    def exceptionStack():
        """Return the current exception's traceback entries, innermost frame
        first, or an empty list when no exception is being handled."""
        exc_info = python_lib_Sys.exc_info()
        tb = exc_info[2]
        if tb is None:
            return []
        frames = python_lib_Traceback.extract_tb(tb)
        frames.reverse()
        return frames

    @staticmethod
    def toHaxe(native, skip=None):
        """Convert native traceback entries (filename, line, name, ...) into
        Haxe StackItem values, dropping the first `skip` entries."""
        if skip is None:
            skip = 0
        converted = []
        for idx in range(len(native)):
            if skip > idx:
                continue
            entry = native[idx] if (idx >= 0 and idx < len(native)) else None
            item = haxe_StackItem.FilePos(haxe_StackItem.Method(None, entry[2]), entry[0], entry[1])
            converted.append(item)
        return converted
# Register runtime class metadata in the global Haxe class table.
haxe_NativeStackTrace._hx_class = haxe_NativeStackTrace
_hx_classes["haxe.NativeStackTrace"] = haxe_NativeStackTrace
class haxe_Serializer:
    """Haxe-generated implementation of haxe.Serializer.

    Encodes Haxe values into the textual Haxe serialization format (one
    dispatch character per value: "n" null, "z"/"i" ints, "d"/"k"/"m"/"p"
    floats, "t"/"f" bools, "y"/"R" strings, "a" arrays, "o"/"c"/"C" objects,
    "j"/"w" enums, etc. — see the Haxe manual's serialization chapter).
    """
    _hx_class_name = "haxe.Serializer"
    _hx_is_interface = "False"
    __slots__ = ("buf", "cache", "shash", "scount", "useCache", "useEnumIndex")
    _hx_fields = ["buf", "cache", "shash", "scount", "useCache", "useEnumIndex"]
    _hx_methods = ["toString", "serializeString", "serializeRef", "serializeFields", "serialize"]
    _hx_statics = ["USE_CACHE", "USE_ENUM_INDEX", "BASE64", "BASE64_CODES"]

    def __init__(self):
        # buf accumulates output; cache/shash+scount implement object and
        # string back-references.
        self.buf = StringBuf()
        self.cache = list()
        self.useCache = haxe_Serializer.USE_CACHE
        self.useEnumIndex = haxe_Serializer.USE_ENUM_INDEX
        self.shash = haxe_ds_StringMap()
        self.scount = 0

    def toString(self):
        # Return the serialized data accumulated so far.
        return self.buf.b.getvalue()

    def serializeString(self, s):
        # Strings are url-encoded and cached: "y<len>:<data>" the first
        # time, "R<index>" for every subsequent occurrence.
        x = self.shash.h.get(s, None)
        if (x is not None):
            self.buf.b.write("R")
            _this = self.buf
            s1 = Std.string(x)
            _this.b.write(s1)
            return
        value = self.scount
        self.scount = (self.scount + 1)
        self.shash.h[s] = value
        self.buf.b.write("y")
        s = python_lib_urllib_Parse.quote(s, "")
        _this = self.buf
        s1 = Std.string(len(s))
        _this.b.write(s1)
        self.buf.b.write(":")
        _this = self.buf
        s1 = Std.string(s)
        _this.b.write(s1)

    def serializeRef(self, v):
        # If v was already serialized, emit "r<index>" and return True;
        # otherwise remember it in the cache and return False.
        _g = 0
        _g1 = len(self.cache)
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            if HxOverrides.eq((self.cache[i] if i >= 0 and i < len(self.cache) else None), v):
                self.buf.b.write("r")
                _this = self.buf
                s = Std.string(i)
                _this.b.write(s)
                return True
        _this = self.cache
        _this.append(v)
        return False

    def serializeFields(self, v):
        # Serialize every field of an anonymous object, terminated by "g".
        _g = 0
        _g1 = python_Boot.fields(v)
        while (_g < len(_g1)):
            f = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
            _g = (_g + 1)
            self.serializeString(f)
            self.serialize(Reflect.field(v, f))
        self.buf.b.write("g")

    def serialize(self, v):
        # Dispatch on the runtime type index reported by Type.typeof(v).
        _g = Type.typeof(v)
        tmp = _g.index
        if (tmp == 0):
            # null
            self.buf.b.write("n")
        elif (tmp == 1):
            # integer: "z" for zero, otherwise "i<value>"
            v1 = v
            if (v1 == 0):
                self.buf.b.write("z")
                return
            self.buf.b.write("i")
            _this = self.buf
            s = Std.string(v1)
            _this.b.write(s)
        elif (tmp == 2):
            # float: "k" NaN, "m"/"p" for -/+ infinity, otherwise "d<value>"
            v1 = v
            if python_lib_Math.isnan(v1):
                self.buf.b.write("k")
            elif (not ((((v1 != Math.POSITIVE_INFINITY) and ((v1 != Math.NEGATIVE_INFINITY))) and (not python_lib_Math.isnan(v1))))):
                self.buf.b.write(("m" if ((v1 < 0)) else "p"))
            else:
                self.buf.b.write("d")
                _this = self.buf
                s = Std.string(v1)
                _this.b.write(s)
        elif (tmp == 3):
            # boolean
            self.buf.b.write(("t" if v else "f"))
        elif (tmp == 4):
            # object: class reference "A", enum reference "B", or anonymous
            # object "o...g"
            if Std.isOfType(v, Class):
                className = Type.getClassName(v)
                self.buf.b.write("A")
                self.serializeString(className)
            elif Std.isOfType(v, Enum):
                self.buf.b.write("B")
                self.serializeString(Type.getEnumName(v))
            else:
                if (self.useCache and self.serializeRef(v)):
                    return
                self.buf.b.write("o")
                self.serializeFields(v)
        elif (tmp == 5):
            raise haxe_Exception.thrown("Cannot serialize function")
        elif (tmp == 6):
            # class instance: strings and a few standard classes have
            # dedicated encodings; everything else is written field by field.
            c = _g.params[0]
            if (c == str):
                self.serializeString(v)
                return
            if (self.useCache and self.serializeRef(v)):
                return
            _g1 = Type.getClassName(c)
            _hx_local_0 = len(_g1)
            # The generated dispatch switches on the class-name length first,
            # then compares the name itself.
            if (_hx_local_0 == 17):
                if (_g1 == "haxe.ds.ObjectMap"):
                    # "M<key><value>...h"
                    self.buf.b.write("M")
                    v1 = v
                    k = v1.keys()
                    while k.hasNext():
                        k1 = k.next()
                        self.serialize(k1)
                        self.serialize(v1.h.get(k1, None))
                    self.buf.b.write("h")
                elif (_g1 == "haxe.ds.StringMap"):
                    # "b<key><value>...h"
                    self.buf.b.write("b")
                    v1 = v
                    k = v1.keys()
                    while k.hasNext():
                        k1 = k.next()
                        self.serializeString(k1)
                        self.serialize(v1.h.get(k1, None))
                    self.buf.b.write("h")
                else:
                    # Generic class instance: "C" when it defines a custom
                    # hxSerialize hook, otherwise "c" followed by its fields.
                    if self.useCache:
                        _this = self.cache
                        if (len(_this) != 0):
                            _this.pop()
                    if python_Boot.hasField(v, "hxSerialize"):
                        self.buf.b.write("C")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        Reflect.field(v, "hxSerialize")(self)
                        self.buf.b.write("g")
                    else:
                        self.buf.b.write("c")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        self.serializeFields(v)
            elif (_hx_local_0 == 5):
                if (_g1 == "Array"):
                    # "a...h"; runs of nulls are compressed as "n" (one) or
                    # "u<count>" (several).
                    ucount = 0
                    self.buf.b.write("a")
                    v1 = v
                    l = len(v1)
                    _g1 = 0
                    _g2 = l
                    while (_g1 < _g2):
                        i = _g1
                        _g1 = (_g1 + 1)
                        if ((v1[i] if i >= 0 and i < len(v1) else None) is None):
                            ucount = (ucount + 1)
                        else:
                            if (ucount > 0):
                                if (ucount == 1):
                                    self.buf.b.write("n")
                                else:
                                    self.buf.b.write("u")
                                    _this = self.buf
                                    s = Std.string(ucount)
                                    _this.b.write(s)
                                ucount = 0
                            self.serialize((v1[i] if i >= 0 and i < len(v1) else None))
                    if (ucount > 0):
                        if (ucount == 1):
                            self.buf.b.write("n")
                        else:
                            self.buf.b.write("u")
                            _this = self.buf
                            s = Std.string(ucount)
                            _this.b.write(s)
                    self.buf.b.write("h")
                else:
                    # Generic class instance (see comment above).
                    if self.useCache:
                        _this = self.cache
                        if (len(_this) != 0):
                            _this.pop()
                    if python_Boot.hasField(v, "hxSerialize"):
                        self.buf.b.write("C")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        Reflect.field(v, "hxSerialize")(self)
                        self.buf.b.write("g")
                    else:
                        self.buf.b.write("c")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        self.serializeFields(v)
            elif (_hx_local_0 == 4):
                if (_g1 == "Date"):
                    # "v<timestamp in ms>"
                    d = v
                    self.buf.b.write("v")
                    _this = self.buf
                    s = Std.string((d.date.timestamp() * 1000))
                    _this.b.write(s)
                else:
                    # Generic class instance (see comment above).
                    if self.useCache:
                        _this = self.cache
                        if (len(_this) != 0):
                            _this.pop()
                    if python_Boot.hasField(v, "hxSerialize"):
                        self.buf.b.write("C")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        Reflect.field(v, "hxSerialize")(self)
                        self.buf.b.write("g")
                    else:
                        self.buf.b.write("c")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        self.serializeFields(v)
            elif (_hx_local_0 == 12):
                if (_g1 == "haxe.ds.List"):
                    # "l<item>...h"
                    self.buf.b.write("l")
                    v1 = v
                    _g_head = v1.h
                    while (_g_head is not None):
                        val = _g_head.item
                        _g_head = _g_head.next
                        i = val
                        self.serialize(i)
                    self.buf.b.write("h")
                else:
                    # Generic class instance (see comment above).
                    if self.useCache:
                        _this = self.cache
                        if (len(_this) != 0):
                            _this.pop()
                    if python_Boot.hasField(v, "hxSerialize"):
                        self.buf.b.write("C")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        Reflect.field(v, "hxSerialize")(self)
                        self.buf.b.write("g")
                    else:
                        self.buf.b.write("c")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        self.serializeFields(v)
            elif (_hx_local_0 == 13):
                if (_g1 == "haxe.io.Bytes"):
                    # "s<len>:<base64 data>" using the serializer's own
                    # BASE64 alphabet; three input bytes per four chars.
                    v1 = v
                    self.buf.b.write("s")
                    _this = self.buf
                    s = Std.string(Math.ceil(((v1.length * 8) / 6)))
                    _this.b.write(s)
                    self.buf.b.write(":")
                    i = 0
                    _hx_max = (v1.length - 2)
                    b64 = haxe_Serializer.BASE64_CODES
                    if (b64 is None):
                        # Build the char-code table lazily, once per process.
                        this1 = [None]*len(haxe_Serializer.BASE64)
                        b64 = this1
                        _g1 = 0
                        _g2 = len(haxe_Serializer.BASE64)
                        while (_g1 < _g2):
                            i1 = _g1
                            _g1 = (_g1 + 1)
                            val = HxString.charCodeAt(haxe_Serializer.BASE64, i1)
                            b64[i1] = val
                        haxe_Serializer.BASE64_CODES = b64
                    while (i < _hx_max):
                        # Encode a full 3-byte group as 4 output characters.
                        pos = i
                        i = (i + 1)
                        b1 = v1.b[pos]
                        pos1 = i
                        i = (i + 1)
                        b2 = v1.b[pos1]
                        pos2 = i
                        i = (i + 1)
                        b3 = v1.b[pos2]
                        _this = self.buf
                        c1 = b64[(b1 >> 2)]
                        s = "".join(map(chr, [c1]))
                        _this.b.write(s)
                        _this1 = self.buf
                        c2 = b64[((((b1 << 4) | ((b2 >> 4)))) & 63)]
                        s1 = "".join(map(chr, [c2]))
                        _this1.b.write(s1)
                        _this2 = self.buf
                        c3 = b64[((((b2 << 2) | ((b3 >> 6)))) & 63)]
                        s2 = "".join(map(chr, [c3]))
                        _this2.b.write(s2)
                        _this3 = self.buf
                        c4 = b64[(b3 & 63)]
                        s3 = "".join(map(chr, [c4]))
                        _this3.b.write(s3)
                    if (i == _hx_max):
                        # Two trailing bytes -> three output characters.
                        pos = i
                        i = (i + 1)
                        b1 = v1.b[pos]
                        pos = i
                        i = (i + 1)
                        b2 = v1.b[pos]
                        _this = self.buf
                        c1 = b64[(b1 >> 2)]
                        s = "".join(map(chr, [c1]))
                        _this.b.write(s)
                        _this = self.buf
                        c1 = b64[((((b1 << 4) | ((b2 >> 4)))) & 63)]
                        s = "".join(map(chr, [c1]))
                        _this.b.write(s)
                        _this = self.buf
                        c1 = b64[((b2 << 2) & 63)]
                        s = "".join(map(chr, [c1]))
                        _this.b.write(s)
                    elif (i == ((_hx_max + 1))):
                        # One trailing byte -> two output characters.
                        pos = i
                        i = (i + 1)
                        b1 = v1.b[pos]
                        _this = self.buf
                        c1 = b64[(b1 >> 2)]
                        s = "".join(map(chr, [c1]))
                        _this.b.write(s)
                        _this = self.buf
                        c1 = b64[((b1 << 4) & 63)]
                        s = "".join(map(chr, [c1]))
                        _this.b.write(s)
                else:
                    # Generic class instance (see comment above).
                    if self.useCache:
                        _this = self.cache
                        if (len(_this) != 0):
                            _this.pop()
                    if python_Boot.hasField(v, "hxSerialize"):
                        self.buf.b.write("C")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        Reflect.field(v, "hxSerialize")(self)
                        self.buf.b.write("g")
                    else:
                        self.buf.b.write("c")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        self.serializeFields(v)
            elif (_hx_local_0 == 14):
                if (_g1 == "haxe.ds.IntMap"):
                    # "q:<key><value>...h"
                    self.buf.b.write("q")
                    v1 = v
                    k = v1.keys()
                    while k.hasNext():
                        k1 = k.next()
                        self.buf.b.write(":")
                        _this = self.buf
                        s = Std.string(k1)
                        _this.b.write(s)
                        self.serialize(v1.h.get(k1, None))
                    self.buf.b.write("h")
                else:
                    # Generic class instance (see comment above).
                    if self.useCache:
                        _this = self.cache
                        if (len(_this) != 0):
                            _this.pop()
                    if python_Boot.hasField(v, "hxSerialize"):
                        self.buf.b.write("C")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        Reflect.field(v, "hxSerialize")(self)
                        self.buf.b.write("g")
                    else:
                        self.buf.b.write("c")
                        self.serializeString(Type.getClassName(c))
                        if self.useCache:
                            _this = self.cache
                            _this.append(v)
                        self.serializeFields(v)
            else:
                # Generic class instance (see comment above).
                if self.useCache:
                    _this = self.cache
                    if (len(_this) != 0):
                        _this.pop()
                if python_Boot.hasField(v, "hxSerialize"):
                    self.buf.b.write("C")
                    self.serializeString(Type.getClassName(c))
                    if self.useCache:
                        _this = self.cache
                        _this.append(v)
                    Reflect.field(v, "hxSerialize")(self)
                    self.buf.b.write("g")
                else:
                    self.buf.b.write("c")
                    self.serializeString(Type.getClassName(c))
                    if self.useCache:
                        _this = self.cache
                        _this.append(v)
                    self.serializeFields(v)
        elif (tmp == 7):
            # enum value: "j" by constructor index or "w" by constructor name,
            # followed by ":<nargs>" and the serialized arguments.
            e = _g.params[0]
            if self.useCache:
                if self.serializeRef(v):
                    return
                _this = self.cache
                if (len(_this) != 0):
                    _this.pop()
            _this = self.buf
            s = Std.string(("j" if (self.useEnumIndex) else "w"))
            _this.b.write(s)
            self.serializeString(Type.getEnumName(e))
            if self.useEnumIndex:
                self.buf.b.write(":")
                _this = self.buf
                s = Std.string(v.index)
                _this.b.write(s)
            else:
                self.serializeString(v.tag)
            self.buf.b.write(":")
            arr = list(v.params)
            if (arr is not None):
                _this = self.buf
                s = Std.string(len(arr))
                _this.b.write(s)
                _g = 0
                while (_g < len(arr)):
                    v1 = (arr[_g] if _g >= 0 and _g < len(arr) else None)
                    _g = (_g + 1)
                    self.serialize(v1)
            else:
                self.buf.b.write("0")
            if self.useCache:
                _this = self.cache
                _this.append(v)
        else:
            raise haxe_Exception.thrown(("Cannot serialize " + Std.string(v)))

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.buf = None
        _hx_o.cache = None
        _hx_o.shash = None
        _hx_o.scount = None
        _hx_o.useCache = None
        _hx_o.useEnumIndex = None

# Register runtime class metadata in the global Haxe class table.
haxe_Serializer._hx_class = haxe_Serializer
_hx_classes["haxe.Serializer"] = haxe_Serializer
class haxe_Timer:
    """Haxe-generated timer driven by the current thread's event loop."""
    _hx_class_name = "haxe.Timer"
    _hx_is_interface = "False"
    _hx_fields = ["thread", "eventHandler"]
    _hx_methods = ["stop", "run"]
    _hx_statics = ["delay"]

    def __init__(self, time_ms):
        """Schedule run() to fire repeatedly every time_ms milliseconds on
        the current thread's event queue."""
        self.eventHandler = None
        self.thread = None
        owner = self
        self.thread = sys_thread__Thread_HxThread.current()
        def _tick():
            owner.run()
        self.eventHandler = sys_thread__Thread_Thread_Impl_.get_events(self.thread).repeat(_tick, time_ms)

    def stop(self):
        # Cancel the repeating event that drives run().
        sys_thread__Thread_Thread_Impl_.get_events(self.thread).cancel(self.eventHandler)

    def run(self):
        # Replaced per-instance by callers (see delay); default is a no-op.
        pass

    @staticmethod
    def delay(f, time_ms):
        """Invoke f once after time_ms milliseconds; returns the timer so the
        pending call can still be cancelled via stop()."""
        timer = haxe_Timer(time_ms)
        def _fire_once():
            timer.stop()
            f()
        timer.run = _fire_once
        return timer

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.thread = None
        _hx_o.eventHandler = None
# Register runtime class metadata in the global Haxe class table.
haxe_Timer._hx_class = haxe_Timer
_hx_classes["haxe.Timer"] = haxe_Timer
class haxe__Unserializer_DefaultResolver:
    """Default name resolver used by haxe.Unserializer: delegates class and
    enum lookup straight to the Type reflection API."""
    _hx_class_name = "haxe._Unserializer.DefaultResolver"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_methods = ["resolveClass", "resolveEnum"]

    def __init__(self):
        # Stateless; nothing to initialise.
        pass

    def resolveClass(self, name):
        """Look up a class object by its dotted Haxe name."""
        return Type.resolveClass(name)

    def resolveEnum(self, name):
        """Look up an enum object by its dotted Haxe name."""
        return Type.resolveEnum(name)

    @staticmethod
    def _hx_empty_init(_hx_o): pass
# Register runtime class metadata in the global Haxe class table.
haxe__Unserializer_DefaultResolver._hx_class = haxe__Unserializer_DefaultResolver
_hx_classes["haxe._Unserializer.DefaultResolver"] = haxe__Unserializer_DefaultResolver
class haxe_Unserializer:
    """Haxe-generated implementation of haxe.Unserializer.

    Decodes values produced by haxe.Serializer.  unserialize() reads one
    dispatch character and parses the corresponding payload; `cache` and
    `scache` resolve back-references ("r<n>" / "R<n>") to previously decoded
    values and strings.
    """
    _hx_class_name = "haxe.Unserializer"
    _hx_is_interface = "False"
    __slots__ = ("buf", "pos", "length", "cache", "scache", "resolver")
    _hx_fields = ["buf", "pos", "length", "cache", "scache", "resolver"]
    _hx_methods = ["readDigits", "readFloat", "unserializeObject", "unserializeEnum", "unserialize"]
    _hx_statics = ["DEFAULT_RESOLVER", "BASE64", "CODES", "initCodes"]

    def __init__(self, buf):
        self.resolver = None
        self.scache = None
        self.cache = None
        self.length = None
        self.pos = None
        self.buf = buf
        self.length = len(self.buf)
        self.pos = 0
        self.scache = list()
        self.cache = list()
        r = haxe_Unserializer.DEFAULT_RESOLVER
        if (r is None):
            # Create the shared default resolver lazily on first use.
            r = haxe__Unserializer_DefaultResolver()
            haxe_Unserializer.DEFAULT_RESOLVER = r
        self.resolver = r

    def readDigits(self):
        # Parse an optionally negative decimal integer at the current position.
        k = 0
        s = False
        fpos = self.pos
        while True:
            p = self.pos
            s1 = self.buf
            c = (-1 if ((p >= len(s1))) else ord(s1[p]))
            if (c == -1):
                break
            if (c == 45):
                # '-' is only valid as the very first character.
                if (self.pos != fpos):
                    break
                s = True
                _hx_local_0 = self
                _hx_local_1 = _hx_local_0.pos
                _hx_local_0.pos = (_hx_local_1 + 1)
                _hx_local_1
                continue
            if ((c < 48) or ((c > 57))):
                break
            k = ((k * 10) + ((c - 48)))
            _hx_local_2 = self
            _hx_local_3 = _hx_local_2.pos
            _hx_local_2.pos = (_hx_local_3 + 1)
            _hx_local_3
        if s:
            k = (k * -1)
        return k

    def readFloat(self):
        # Consume characters that can occur in a float literal
        # (digits, '+', '-', '.', '/', 'e', 'E') and parse the span.
        p1 = self.pos
        while True:
            p = self.pos
            s = self.buf
            c = (-1 if ((p >= len(s))) else ord(s[p]))
            if (c == -1):
                break
            if ((((c >= 43) and ((c < 58))) or ((c == 101))) or ((c == 69))):
                _hx_local_0 = self
                _hx_local_1 = _hx_local_0.pos
                _hx_local_0.pos = (_hx_local_1 + 1)
                _hx_local_1
            else:
                break
        return Std.parseFloat(HxString.substr(self.buf, p1, (self.pos - p1)))

    def unserializeObject(self, o):
        # Read "<key><value>" pairs into o's fields until the "g" terminator,
        # then skip past the terminator.
        while True:
            if (self.pos >= self.length):
                raise haxe_Exception.thrown("Invalid object")
            p = self.pos
            s = self.buf
            if (((-1 if ((p >= len(s))) else ord(s[p]))) == 103):
                break
            k = self.unserialize()
            if (not Std.isOfType(k, str)):
                raise haxe_Exception.thrown("Invalid object key")
            v = self.unserialize()
            field = k
            # Mirror the Haxe->Python field-name mangling ("_hx_" prefix for
            # Python keywords and dunder-like names).
            setattr(o, (("_hx_" + field) if ((field in python_Boot.keywords)) else (("_hx_" + field) if (((((len(field) > 2) and ((ord(field[0]) == 95))) and ((ord(field[1]) == 95))) and ((ord(field[(len(field) - 1)]) != 95)))) else field)), v)
        _hx_local_0 = self
        _hx_local_1 = _hx_local_0.pos
        _hx_local_0.pos = (_hx_local_1 + 1)
        _hx_local_1

    def unserializeEnum(self, edecl, tag):
        # Read ":<nargs>" then that many argument values, and construct the
        # enum instance via reflection.
        p = self.pos
        self.pos = (self.pos + 1)
        s = self.buf
        if (((-1 if ((p >= len(s))) else ord(s[p]))) != 58):
            raise haxe_Exception.thrown("Invalid enum format")
        nargs = self.readDigits()
        if (nargs == 0):
            return Type.createEnum(edecl, tag)
        args = list()
        while True:
            tmp = nargs
            nargs = (nargs - 1)
            if (not ((tmp > 0))):
                break
            x = self.unserialize()
            args.append(x)
        return Type.createEnum(edecl, tag, args)

    def unserialize(self):
        # Consume one dispatch character and decode the value it introduces.
        p = self.pos
        self.pos = (self.pos + 1)
        s = self.buf
        _g = (-1 if ((p >= len(s))) else ord(s[p]))
        if (_g == 65):
            # "A": class reference
            name = self.unserialize()
            cl = self.resolver.resolveClass(name)
            if (cl is None):
                raise haxe_Exception.thrown(("Class not found " + ("null" if name is None else name)))
            return cl
        elif (_g == 66):
            # "B": enum reference
            name = self.unserialize()
            e = self.resolver.resolveEnum(name)
            if (e is None):
                raise haxe_Exception.thrown(("Enum not found " + ("null" if name is None else name)))
            return e
        elif (_g == 67):
            # "C": class instance with a custom hxUnserialize hook
            name = self.unserialize()
            cl = self.resolver.resolveClass(name)
            if (cl is None):
                raise haxe_Exception.thrown(("Class not found " + ("null" if name is None else name)))
            o = Type.createEmptyInstance(cl)
            _this = self.cache
            _this.append(o)
            Reflect.field(o, "hxUnserialize")(self)
            p = self.pos
            self.pos = (self.pos + 1)
            s = self.buf
            if (((-1 if ((p >= len(s))) else ord(s[p]))) != 103):
                raise haxe_Exception.thrown("Invalid custom data")
            return o
        elif (_g == 77):
            # "M": haxe.ds.ObjectMap, key/value pairs until "h"
            h = haxe_ds_ObjectMap()
            _this = self.cache
            _this.append(h)
            buf = self.buf
            while True:
                p = self.pos
                s = self.buf
                if (not ((((-1 if ((p >= len(s))) else ord(s[p]))) != 104))):
                    break
                s1 = self.unserialize()
                h.set(s1, self.unserialize())
            _hx_local_0 = self
            _hx_local_1 = _hx_local_0.pos
            _hx_local_0.pos = (_hx_local_1 + 1)
            _hx_local_1
            return h
        elif (_g == 82):
            # "R": reference into the string cache
            n = self.readDigits()
            if ((n < 0) or ((n >= len(self.scache)))):
                raise haxe_Exception.thrown("Invalid string reference")
            return (self.scache[n] if n >= 0 and n < len(self.scache) else None)
        elif (_g == 97):
            # "a": array; "u<count>" expands runs of nulls
            buf = self.buf
            a = list()
            _this = self.cache
            _this.append(a)
            while True:
                p = self.pos
                s = self.buf
                c = (-1 if ((p >= len(s))) else ord(s[p]))
                if (c == 104):
                    _hx_local_2 = self
                    _hx_local_3 = _hx_local_2.pos
                    _hx_local_2.pos = (_hx_local_3 + 1)
                    _hx_local_3
                    break
                if (c == 117):
                    _hx_local_4 = self
                    _hx_local_5 = _hx_local_4.pos
                    _hx_local_4.pos = (_hx_local_5 + 1)
                    _hx_local_5
                    n = self.readDigits()
                    python_internal_ArrayImpl._set(a, ((len(a) + n) - 1), None)
                else:
                    x = self.unserialize()
                    a.append(x)
            return a
        elif (_g == 98):
            # "b": haxe.ds.StringMap, key/value pairs until "h"
            h = haxe_ds_StringMap()
            _this = self.cache
            _this.append(h)
            buf = self.buf
            while True:
                p = self.pos
                s = self.buf
                if (not ((((-1 if ((p >= len(s))) else ord(s[p]))) != 104))):
                    break
                s1 = self.unserialize()
                value = self.unserialize()
                h.h[s1] = value
            _hx_local_6 = self
            _hx_local_7 = _hx_local_6.pos
            _hx_local_6.pos = (_hx_local_7 + 1)
            _hx_local_7
            return h
        elif (_g == 99):
            # "c": class instance serialized field by field
            name = self.unserialize()
            cl = self.resolver.resolveClass(name)
            if (cl is None):
                raise haxe_Exception.thrown(("Class not found " + ("null" if name is None else name)))
            o = Type.createEmptyInstance(cl)
            _this = self.cache
            _this.append(o)
            self.unserializeObject(o)
            return o
        elif (_g == 100):
            # "d": float literal
            return self.readFloat()
        elif (_g == 102):
            # "f": false
            return False
        elif (_g == 105):
            # "i": integer literal
            return self.readDigits()
        elif (_g == 106):
            # "j": enum value identified by constructor index
            name = self.unserialize()
            edecl = self.resolver.resolveEnum(name)
            if (edecl is None):
                raise haxe_Exception.thrown(("Enum not found " + ("null" if name is None else name)))
            _hx_local_8 = self
            _hx_local_9 = _hx_local_8.pos
            _hx_local_8.pos = (_hx_local_9 + 1)
            _hx_local_9
            index = self.readDigits()
            tag = python_internal_ArrayImpl._get(Type.getEnumConstructs(edecl), index)
            if (tag is None):
                raise haxe_Exception.thrown(((("Unknown enum index " + ("null" if name is None else name)) + "@") + Std.string(index)))
            e = self.unserializeEnum(edecl, tag)
            _this = self.cache
            _this.append(e)
            return e
        elif (_g == 107):
            # "k": NaN
            return Math.NaN
        elif (_g == 108):
            # "l": haxe.ds.List, items until "h"
            l = haxe_ds_List()
            _this = self.cache
            _this.append(l)
            buf = self.buf
            while True:
                p = self.pos
                s = self.buf
                if (not ((((-1 if ((p >= len(s))) else ord(s[p]))) != 104))):
                    break
                l.add(self.unserialize())
            _hx_local_10 = self
            _hx_local_11 = _hx_local_10.pos
            _hx_local_10.pos = (_hx_local_11 + 1)
            _hx_local_11
            return l
        elif (_g == 109):
            # "m": negative infinity
            return Math.NEGATIVE_INFINITY
        elif (_g == 110):
            # "n": null
            return None
        elif (_g == 111):
            # "o": anonymous object
            o = _hx_AnonObject({})
            _this = self.cache
            _this.append(o)
            self.unserializeObject(o)
            return o
        elif (_g == 112):
            # "p": positive infinity
            return Math.POSITIVE_INFINITY
        elif (_g == 113):
            # "q": haxe.ds.IntMap; each key is prefixed with ":"
            h = haxe_ds_IntMap()
            _this = self.cache
            _this.append(h)
            buf = self.buf
            p = self.pos
            self.pos = (self.pos + 1)
            s = self.buf
            c = (-1 if ((p >= len(s))) else ord(s[p]))
            while (c == 58):
                i = self.readDigits()
                h.set(i, self.unserialize())
                p = self.pos
                self.pos = (self.pos + 1)
                s = self.buf
                c = (-1 if ((p >= len(s))) else ord(s[p]))
            if (c != 104):
                raise haxe_Exception.thrown("Invalid IntMap format")
            return h
        elif (_g == 114):
            # "r": reference into the value cache
            n = self.readDigits()
            if ((n < 0) or ((n >= len(self.cache)))):
                raise haxe_Exception.thrown("Invalid reference")
            return (self.cache[n] if n >= 0 and n < len(self.cache) else None)
        elif (_g == 115):
            # "s<len>:<base64>": haxe.io.Bytes
            _hx_len = self.readDigits()
            buf = self.buf
            p = self.pos
            self.pos = (self.pos + 1)
            s = self.buf
            if ((((-1 if ((p >= len(s))) else ord(s[p]))) != 58) or (((self.length - self.pos) < _hx_len))):
                raise haxe_Exception.thrown("Invalid bytes length")
            codes = haxe_Unserializer.CODES
            if (codes is None):
                # Build the base64 reverse table lazily, once per process.
                codes = haxe_Unserializer.initCodes()
                haxe_Unserializer.CODES = codes
            i = self.pos
            rest = (_hx_len & 3)
            size = ((((_hx_len >> 2)) * 3) + (((rest - 1) if ((rest >= 2)) else 0)))
            _hx_max = (i + ((_hx_len - rest)))
            _hx_bytes = haxe_io_Bytes.alloc(size)
            bpos = 0
            while (i < _hx_max):
                # Decode a full group of four characters into three bytes.
                index = i
                i = (i + 1)
                c1 = python_internal_ArrayImpl._get(codes, (-1 if ((index >= len(buf))) else ord(buf[index])))
                index1 = i
                i = (i + 1)
                c2 = python_internal_ArrayImpl._get(codes, (-1 if ((index1 >= len(buf))) else ord(buf[index1])))
                pos = bpos
                bpos = (bpos + 1)
                _hx_bytes.b[pos] = ((((c1 << 2) | ((c2 >> 4)))) & 255)
                index2 = i
                i = (i + 1)
                c3 = python_internal_ArrayImpl._get(codes, (-1 if ((index2 >= len(buf))) else ord(buf[index2])))
                pos1 = bpos
                bpos = (bpos + 1)
                _hx_bytes.b[pos1] = ((((c2 << 4) | ((c3 >> 2)))) & 255)
                index3 = i
                i = (i + 1)
                c4 = python_internal_ArrayImpl._get(codes, (-1 if ((index3 >= len(buf))) else ord(buf[index3])))
                pos2 = bpos
                bpos = (bpos + 1)
                _hx_bytes.b[pos2] = ((((c3 << 6) | c4)) & 255)
            if (rest >= 2):
                # Two or three trailing chars encode one or two final bytes.
                index = i
                i = (i + 1)
                c1 = python_internal_ArrayImpl._get(codes, (-1 if ((index >= len(buf))) else ord(buf[index])))
                index = i
                i = (i + 1)
                c2 = python_internal_ArrayImpl._get(codes, (-1 if ((index >= len(buf))) else ord(buf[index])))
                pos = bpos
                bpos = (bpos + 1)
                _hx_bytes.b[pos] = ((((c1 << 2) | ((c2 >> 4)))) & 255)
                if (rest == 3):
                    index = i
                    i = (i + 1)
                    c3 = python_internal_ArrayImpl._get(codes, (-1 if ((index >= len(buf))) else ord(buf[index])))
                    pos = bpos
                    bpos = (bpos + 1)
                    _hx_bytes.b[pos] = ((((c2 << 4) | ((c3 >> 2)))) & 255)
            _hx_local_12 = self
            _hx_local_13 = _hx_local_12.pos
            _hx_local_12.pos = (_hx_local_13 + _hx_len)
            _hx_local_12.pos
            _this = self.cache
            _this.append(_hx_bytes)
            return _hx_bytes
        elif (_g == 116):
            # "t": true
            return True
        elif (_g == 118):
            # "v": Date — either the 19-char "YYYY-MM-DD hh:mm:ss" string
            # form (detected by four digits followed by "-") or a float
            # millisecond timestamp.
            d = None
            tmp = None
            tmp1 = None
            tmp2 = None
            tmp3 = None
            tmp4 = None
            tmp5 = None
            tmp6 = None
            tmp7 = None
            p = self.pos
            s = self.buf
            if (((-1 if ((p >= len(s))) else ord(s[p]))) >= 48):
                p = self.pos
                s = self.buf
                tmp7 = (((-1 if ((p >= len(s))) else ord(s[p]))) <= 57)
            else:
                tmp7 = False
            if tmp7:
                p = (self.pos + 1)
                s = self.buf
                tmp6 = (((-1 if ((p >= len(s))) else ord(s[p]))) >= 48)
            else:
                tmp6 = False
            if tmp6:
                p = (self.pos + 1)
                s = self.buf
                tmp5 = (((-1 if ((p >= len(s))) else ord(s[p]))) <= 57)
            else:
                tmp5 = False
            if tmp5:
                p = (self.pos + 2)
                s = self.buf
                tmp4 = (((-1 if ((p >= len(s))) else ord(s[p]))) >= 48)
            else:
                tmp4 = False
            if tmp4:
                p = (self.pos + 2)
                s = self.buf
                tmp3 = (((-1 if ((p >= len(s))) else ord(s[p]))) <= 57)
            else:
                tmp3 = False
            if tmp3:
                p = (self.pos + 3)
                s = self.buf
                tmp2 = (((-1 if ((p >= len(s))) else ord(s[p]))) >= 48)
            else:
                tmp2 = False
            if tmp2:
                p = (self.pos + 3)
                s = self.buf
                tmp1 = (((-1 if ((p >= len(s))) else ord(s[p]))) <= 57)
            else:
                tmp1 = False
            if tmp1:
                p = (self.pos + 4)
                s = self.buf
                tmp = (((-1 if ((p >= len(s))) else ord(s[p]))) == 45)
            else:
                tmp = False
            if tmp:
                d = Date.fromString(HxString.substr(self.buf, self.pos, 19))
                _hx_local_14 = self
                _hx_local_15 = _hx_local_14.pos
                _hx_local_14.pos = (_hx_local_15 + 19)
                _hx_local_14.pos
            else:
                d = Date.fromTime(self.readFloat())
            _this = self.cache
            _this.append(d)
            return d
        elif (_g == 119):
            # "w": enum value identified by constructor name
            name = self.unserialize()
            edecl = self.resolver.resolveEnum(name)
            if (edecl is None):
                raise haxe_Exception.thrown(("Enum not found " + ("null" if name is None else name)))
            e = self.unserializeEnum(edecl, self.unserialize())
            _this = self.cache
            _this.append(e)
            return e
        elif (_g == 120):
            # "x": serialized exception — decode and rethrow its value
            raise haxe_Exception.thrown(self.unserialize())
        elif (_g == 121):
            # "y<len>:<urlencoded>": string; decoded and cached in scache
            _hx_len = self.readDigits()
            p = self.pos
            self.pos = (self.pos + 1)
            s = self.buf
            if ((((-1 if ((p >= len(s))) else ord(s[p]))) != 58) or (((self.length - self.pos) < _hx_len))):
                raise haxe_Exception.thrown("Invalid string length")
            s = HxString.substr(self.buf, self.pos, _hx_len)
            _hx_local_16 = self
            _hx_local_17 = _hx_local_16.pos
            _hx_local_16.pos = (_hx_local_17 + _hx_len)
            _hx_local_16.pos
            s = python_lib_urllib_Parse.unquote(s)
            _this = self.scache
            _this.append(s)
            return s
        elif (_g == 122):
            # "z": integer zero
            return 0
        else:
            pass
        # Unknown dispatch character: rewind one position and report it.
        _hx_local_18 = self
        _hx_local_19 = _hx_local_18.pos
        _hx_local_18.pos = (_hx_local_19 - 1)
        _hx_local_19
        s = self.buf
        pos = self.pos
        raise haxe_Exception.thrown(((("Invalid char " + HxOverrides.stringOrNull((("" if (((pos < 0) or ((pos >= len(s))))) else s[pos])))) + " at position ") + Std.string(self.pos)))

    @staticmethod
    def initCodes():
        # Build the BASE64 char-code -> alphabet-index lookup list.
        codes = list()
        _g = 0
        _g1 = len(haxe_Unserializer.BASE64)
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            s = haxe_Unserializer.BASE64
            python_internal_ArrayImpl._set(codes, (-1 if ((i >= len(s))) else ord(s[i])), i)
        return codes

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.buf = None
        _hx_o.pos = None
        _hx_o.length = None
        _hx_o.cache = None
        _hx_o.scache = None
        _hx_o.resolver = None

# Register runtime class metadata in the global Haxe class table.
haxe_Unserializer._hx_class = haxe_Unserializer
_hx_classes["haxe.Unserializer"] = haxe_Unserializer
class haxe_ValueException(haxe_Exception):
    """Exception wrapper used when an arbitrary (non-Exception) value is
    thrown; the original value is recoverable via unwrap()."""
    _hx_class_name = "haxe.ValueException"
    _hx_is_interface = "False"
    __slots__ = ("value",)
    _hx_fields = ["value"]
    _hx_methods = ["unwrap"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = haxe_Exception

    def __init__(self, value, previous=None, native=None):
        # Pre-initialise the slot, let the base class build its message from
        # the stringified value, then store the original value itself.
        self.value = None
        super().__init__(Std.string(value), previous, native)
        self.value = value

    def unwrap(self):
        # Return the originally thrown value.
        return self.value

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.value = None

# Register runtime class metadata in the global Haxe class table.
haxe_ValueException._hx_class = haxe_ValueException
_hx_classes["haxe.ValueException"] = haxe_ValueException
class haxe_io_Bytes:
    """Haxe-generated haxe.io.Bytes: a byte buffer backed by a Python
    bytearray (field b) with an explicit length field."""
    _hx_class_name = "haxe.io.Bytes"
    _hx_is_interface = "False"
    __slots__ = ("length", "b")
    _hx_fields = ["length", "b"]
    _hx_methods = ["blit", "sub", "getString", "toString"]
    _hx_statics = ["alloc", "ofString", "ofData"]

    def __init__(self, length, b):
        self.length = length
        self.b = b

    def blit(self, pos, src, srcpos, _hx_len):
        """Copy _hx_len bytes from src (starting at srcpos) into this buffer
        at pos; raises OutsideBounds when any index is out of range."""
        bad = (pos < 0) or (srcpos < 0) or (_hx_len < 0)
        bad = bad or ((pos + _hx_len) > self.length)
        bad = bad or ((srcpos + _hx_len) > src.length)
        if bad:
            raise haxe_Exception.thrown(haxe_io_Error.OutsideBounds)
        self.b[pos:pos + _hx_len] = src.b[srcpos:srcpos + _hx_len]

    def sub(self, pos, _hx_len):
        """Return a new Bytes containing a copy of _hx_len bytes from pos."""
        if (pos < 0) or (_hx_len < 0) or ((pos + _hx_len) > self.length):
            raise haxe_Exception.thrown(haxe_io_Error.OutsideBounds)
        return haxe_io_Bytes(_hx_len, self.b[pos:(pos + _hx_len)])

    def getString(self, pos, _hx_len, encoding=None):
        """Decode _hx_len bytes starting at pos as UTF-8 text (undecodable
        sequences are replaced)."""
        # The encoding argument is accepted for API compatibility; only
        # UTF-8 decoding is performed here.
        tmp = (encoding is None)
        if (pos < 0) or (_hx_len < 0) or ((pos + _hx_len) > self.length):
            raise haxe_Exception.thrown(haxe_io_Error.OutsideBounds)
        return self.b[pos:pos + _hx_len].decode('UTF-8', 'replace')

    def toString(self):
        """Decode the entire buffer as UTF-8 text."""
        return self.getString(0, self.length)

    @staticmethod
    def alloc(length):
        """Create a zero-filled buffer of the given length."""
        return haxe_io_Bytes(length, bytearray(length))

    @staticmethod
    def ofString(s, encoding=None):
        """Create a buffer holding the UTF-8 encoding of s."""
        encoded = bytearray(s, "UTF-8")
        return haxe_io_Bytes(len(encoded), encoded)

    @staticmethod
    def ofData(b):
        """Wrap an existing byte sequence without copying it."""
        return haxe_io_Bytes(len(b), b)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.length = None
        _hx_o.b = None
# Register runtime class metadata in the global Haxe class table.
haxe_io_Bytes._hx_class = haxe_io_Bytes
_hx_classes["haxe.io.Bytes"] = haxe_io_Bytes
class haxe_crypto_BaseCode:
    """Haxe-generated haxe.crypto.BaseCode: generic base-N decoder whose
    alphabet (a haxe.io.Bytes) must have a power-of-two length <= 256."""
    _hx_class_name = "haxe.crypto.BaseCode"
    _hx_is_interface = "False"
    __slots__ = ("base", "nbits", "tbl")
    _hx_fields = ["base", "nbits", "tbl"]
    _hx_methods = ["initTable", "decodeBytes"]

    def __init__(self, base):
        # nbits = log2(alphabet length); each encoded character carries
        # nbits bits of payload.
        self.tbl = None
        _hx_len = base.length
        nbits = 1
        while (_hx_len > ((1 << nbits))):
            nbits = (nbits + 1)
        if ((nbits > 8) or ((_hx_len != ((1 << nbits))))):
            raise haxe_Exception.thrown("BaseCode : base length must be a power of two.")
        self.base = base
        self.nbits = nbits

    def initTable(self):
        # Build the 256-entry reverse table: byte value -> alphabet index,
        # -1 for bytes that are not part of the alphabet.
        tbl = list()
        _g = 0
        while (_g < 256):
            i = _g
            _g = (_g + 1)
            python_internal_ArrayImpl._set(tbl, i, -1)
        _g = 0
        _g1 = self.base.length
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            python_internal_ArrayImpl._set(tbl, self.base.b[i], i)
        self.tbl = tbl

    def decodeBytes(self, b):
        # Decode b by shifting nbits of payload per input character into a
        # bit accumulator (buf/curbits) and emitting one output byte for
        # every 8 accumulated bits.
        nbits = self.nbits
        base = self.base
        if (self.tbl is None):
            self.initTable()
        tbl = self.tbl
        size = ((b.length * nbits) >> 3)
        out = haxe_io_Bytes.alloc(size)
        buf = 0
        curbits = 0
        pin = 0
        pout = 0
        while (pout < size):
            while (curbits < 8):
                curbits = (curbits + nbits)
                buf = (buf << nbits)
                pos = pin
                pin = (pin + 1)
                i = python_internal_ArrayImpl._get(tbl, b.b[pos])
                if (i == -1):
                    raise haxe_Exception.thrown("BaseCode : invalid encoded char")
                buf = (buf | i)
            curbits = (curbits - 8)
            pos1 = pout
            pout = (pout + 1)
            out.b[pos1] = (((buf >> curbits) & 255) & 255)
        return out

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.base = None
        _hx_o.nbits = None
        _hx_o.tbl = None

# Register runtime class metadata in the global Haxe class table.
haxe_crypto_BaseCode._hx_class = haxe_crypto_BaseCode
_hx_classes["haxe.crypto.BaseCode"] = haxe_crypto_BaseCode
class haxe_crypto_Sha1:
    """Haxe-generated haxe.crypto.Sha1: SHA-1 message digest.

    make() is the public entry point; doEncode() runs the compression
    rounds and bytes2blks() performs message padding (RFC 3174).
    """
    _hx_class_name = "haxe.crypto.Sha1"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_methods = ["doEncode", "ft", "kt"]
    _hx_statics = ["make", "bytes2blks"]

    def __init__(self):
        pass

    def doEncode(self, x):
        """Run the SHA-1 compression over x (a list of padded big-endian
        32-bit words) and return the five state words [h0..h4].

        NOTE(review): intermediate sums are not masked to 32 bits here —
        make() masks each byte when emitting the digest.
        """
        w = list()
        a = 1732584193
        b = -271733879
        c = -1732584194
        d = 271733878
        e = -1009589776
        i = 0
        while (i < len(x)):
            # Process one 16-word (512-bit) block.
            olda = a
            oldb = b
            oldc = c
            oldd = d
            olde = e
            j = 0
            while (j < 80):
                if (j < 16):
                    python_internal_ArrayImpl._set(w, j, python_internal_ArrayImpl._get(x, (i + j)))
                else:
                    # Message schedule: w[j] = rotl1(w[j-3]^w[j-8]^w[j-14]^w[j-16])
                    num = (((python_internal_ArrayImpl._get(w, (j - 3)) ^ python_internal_ArrayImpl._get(w, (j - 8))) ^ python_internal_ArrayImpl._get(w, (j - 14))) ^ python_internal_ArrayImpl._get(w, (j - 16)))
                    python_internal_ArrayImpl._set(w, j, ((num << 1) | (HxOverrides.rshift(num, 31))))
                t = (((((((a << 5) | (HxOverrides.rshift(a, 27)))) + self.ft(j,b,c,d)) + e) + (w[j] if j >= 0 and j < len(w) else None)) + self.kt(j))
                e = d
                d = c
                c = ((b << 30) | (HxOverrides.rshift(b, 2)))
                b = a
                a = t
                j = (j + 1)
            a = (a + olda)
            b = (b + oldb)
            c = (c + oldc)
            d = (d + oldd)
            e = (e + olde)
            i = (i + 16)
        return [a, b, c, d, e]

    def ft(self, t, b, c, d):
        """Round function f_t of SHA-1, selected by the round number t."""
        if (t < 20):
            return ((b & c) | ((~b & d)))
        if (t < 40):
            return ((b ^ c) ^ d)
        if (t < 60):
            return (((b & c) | ((b & d))) | ((c & d)))
        return ((b ^ c) ^ d)

    def kt(self, t):
        """Round constant K_t of SHA-1, selected by the round number t."""
        if (t < 20):
            return 1518500249
        if (t < 40):
            return 1859775393
        if (t < 60):
            return -1894007588
        return -899497514

    @staticmethod
    def make(b):
        """Digest the Bytes b and return the 20-byte SHA-1 as haxe.io.Bytes.

        Replaces the original 40-line unrolled byte extraction with an
        equivalent loop: each state word is emitted big-endian, the top byte
        through the unsigned 32-bit shift helper (the word may be a negative
        Python int) and the lower bytes through masked arithmetic shifts,
        exactly as the unrolled code did.
        """
        h = haxe_crypto_Sha1().doEncode(haxe_crypto_Sha1.bytes2blks(b))
        out = haxe_io_Bytes.alloc(20)
        p = 0
        for word in h:
            out.b[p] = (HxOverrides.rshift(word, 24) & 255)
            out.b[(p + 1)] = ((word >> 16) & 255)
            out.b[(p + 2)] = ((word >> 8) & 255)
            out.b[(p + 3)] = (word & 255)
            p = (p + 4)
        return out

    @staticmethod
    def bytes2blks(b):
        """Pack the input bytes into big-endian 32-bit words and append the
        SHA-1 padding: a 0x80 terminator byte and the message bit length in
        the final word."""
        nblk = ((((b.length + 8) >> 6)) + 1)
        blks = list()
        _g = 0
        _g1 = (nblk * 16)
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            python_internal_ArrayImpl._set(blks, i, 0)
        _g = 0
        _g1 = b.length
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            p = (i >> 2)
            python_internal_ArrayImpl._set(blks, p, ((blks[p] if p >= 0 and p < len(blks) else None) | ((b.b[i] << ((24 - ((((i & 3)) << 3))))))))
        i = b.length
        p = (i >> 2)
        python_internal_ArrayImpl._set(blks, p, ((blks[p] if p >= 0 and p < len(blks) else None) | ((128 << ((24 - ((((i & 3)) << 3))))))))
        python_internal_ArrayImpl._set(blks, ((nblk * 16) - 1), (b.length * 8))
        return blks

    @staticmethod
    def _hx_empty_init(_hx_o): pass
# Register the class in the Haxe runtime metadata tables.
haxe_crypto_Sha1._hx_class = haxe_crypto_Sha1
_hx_classes["haxe.crypto.Sha1"] = haxe_crypto_Sha1
class haxe_ds_IntMap:
    """haxe.ds.IntMap backed by a native Python dict."""
    _hx_class_name = "haxe.ds.IntMap"
    _hx_is_interface = "False"
    __slots__ = ("h",)
    _hx_fields = ["h"]
    _hx_methods = ["set", "keys", "iterator"]
    _hx_interfaces = [haxe_IMap]

    def __init__(self):
        # Underlying int -> value storage.
        self.h = dict()

    def set(self, key, value):
        """Map `key` to `value`, overwriting any previous entry."""
        self.h[key] = value

    def keys(self):
        """Return a Haxe-style iterator over the stored keys."""
        return python_HaxeIterator(iter(self.h.keys()))

    def iterator(self):
        """Return a Haxe-style iterator over the stored values."""
        return python_HaxeIterator(iter(self.h.values()))

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.h = None


haxe_ds_IntMap._hx_class = haxe_ds_IntMap
_hx_classes["haxe.ds.IntMap"] = haxe_ds_IntMap
class haxe_ds_List:
    """Singly linked list with O(1) tail append (haxe.ds.List)."""
    _hx_class_name = "haxe.ds.List"
    _hx_is_interface = "False"
    __slots__ = ("h", "q", "length")
    _hx_fields = ["h", "q", "length"]
    _hx_methods = ["add", "pop", "isEmpty", "remove"]

    def __init__(self):
        self.q = None   # tail node
        self.h = None   # head node
        self.length = 0

    def add(self, item):
        """Append `item` at the tail of the list."""
        node = haxe_ds__List_ListNode(item, None)
        if self.h is None:
            self.h = node
        else:
            self.q.next = node
        self.q = node
        self.length = (self.length + 1)

    def pop(self):
        """Remove and return the head item, or None when the list is empty."""
        if self.h is None:
            return None
        item = self.h.item
        self.h = self.h.next
        if self.h is None:
            # List became empty: drop the stale tail reference too.
            self.q = None
        self.length = (self.length - 1)
        return item

    def isEmpty(self):
        """Tell whether the list holds no items."""
        return (self.h is None)

    def remove(self, v):
        """Unlink the first item equal to `v` (Haxe equality semantics);
        return True when something was removed."""
        prev = None
        node = self.h
        while node is not None:
            if HxOverrides.eq(node.item, v):
                if prev is None:
                    self.h = node.next
                else:
                    prev.next = node.next
                if (self.q == node):
                    self.q = prev
                self.length = (self.length - 1)
                return True
            prev = node
            node = node.next
        return False

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.h = None
        _hx_o.q = None
        _hx_o.length = None
# Register the class in the Haxe runtime metadata tables.
haxe_ds_List._hx_class = haxe_ds_List
_hx_classes["haxe.ds.List"] = haxe_ds_List
class haxe_ds__List_ListNode:
    """Single cell of haxe.ds.List: a payload plus a link to the next cell."""
    _hx_class_name = "haxe.ds._List.ListNode"
    _hx_is_interface = "False"
    __slots__ = ("item", "next")
    _hx_fields = ["item", "next"]

    def __init__(self, item, next):
        self.item = item
        self.next = next

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.item = None
        _hx_o.next = None
# Register the class in the Haxe runtime metadata tables.
haxe_ds__List_ListNode._hx_class = haxe_ds__List_ListNode
_hx_classes["haxe.ds._List.ListNode"] = haxe_ds__List_ListNode
class haxe_ds__List_ListIterator:
    """Haxe-style iterator (hasNext/next) walking a chain of list nodes."""
    _hx_class_name = "haxe.ds._List.ListIterator"
    _hx_is_interface = "False"
    __slots__ = ("head",)
    _hx_fields = ["head"]
    _hx_methods = ["hasNext", "next"]

    def __init__(self, head):
        # Node the iterator will yield next; None once exhausted.
        self.head = head

    def hasNext(self):
        """True while nodes remain."""
        return (self.head is not None)

    def next(self):
        """Return the current node's item and advance to the next node."""
        value = self.head.item
        self.head = self.head.next
        return value

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.head = None
# Register the class in the Haxe runtime metadata tables.
haxe_ds__List_ListIterator._hx_class = haxe_ds__List_ListIterator
_hx_classes["haxe.ds._List.ListIterator"] = haxe_ds__List_ListIterator
class haxe_ds_ObjectMap:
    """haxe.ds.ObjectMap backed by a native Python dict (keys by identity
    semantics of the stored objects' own hashing)."""
    _hx_class_name = "haxe.ds.ObjectMap"
    _hx_is_interface = "False"
    __slots__ = ("h",)
    _hx_fields = ["h"]
    _hx_methods = ["set", "keys"]
    _hx_interfaces = [haxe_IMap]

    def __init__(self):
        # Underlying object -> value storage.
        self.h = dict()

    def set(self, key, value):
        """Map `key` to `value`, overwriting any previous entry."""
        self.h[key] = value

    def keys(self):
        """Return a Haxe-style iterator over the stored keys."""
        return python_HaxeIterator(iter(self.h.keys()))

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.h = None


haxe_ds_ObjectMap._hx_class = haxe_ds_ObjectMap
_hx_classes["haxe.ds.ObjectMap"] = haxe_ds_ObjectMap
class haxe_format_JsonPrinter:
    # Streaming JSON serializer (generated from haxe.format.JsonPrinter).
    # Dispatches on the Haxe runtime type of each value; honors an optional
    # replacer callback and an indent string for pretty-printing.
    _hx_class_name = "haxe.format.JsonPrinter"
    _hx_is_interface = "False"
    __slots__ = ("buf", "replacer", "indent", "pretty", "nind")
    _hx_fields = ["buf", "replacer", "indent", "pretty", "nind"]
    _hx_methods = ["write", "classString", "fieldsString", "quote"]
    _hx_statics = ["print"]
    def __init__(self,replacer,space):
        # replacer: optional (key, value) -> value hook applied before writing.
        # space: indentation unit; pretty-printing is enabled when not None.
        self.replacer = replacer
        self.indent = space
        self.pretty = (space is not None)
        # nind: current nesting depth used for pretty indentation.
        self.nind = 0
        self.buf = StringBuf()
    def write(self,k,v):
        # Serialize value `v`; `k` is only forwarded to the replacer hook.
        if (self.replacer is not None):
            v = self.replacer(k,v)
        _g = Type.typeof(v)
        tmp = _g.index
        if (tmp == 0):
            # TNull
            self.buf.b.write("null")
        elif (tmp == 1):
            # TInt
            _this = self.buf
            s = Std.string(v)
            _this.b.write(s)
        elif (tmp == 2):
            # TFloat: non-finite numbers are emitted as JSON null.
            f = v
            v1 = (Std.string(v) if ((((f != Math.POSITIVE_INFINITY) and ((f != Math.NEGATIVE_INFINITY))) and (not python_lib_Math.isnan(f)))) else "null")
            _this = self.buf
            s = Std.string(v1)
            _this.b.write(s)
        elif (tmp == 3):
            # TBool
            _this = self.buf
            s = Std.string(v)
            _this.b.write(s)
        elif (tmp == 4):
            # TObject: anonymous structure, serialized field by field.
            self.fieldsString(v,python_Boot.fields(v))
        elif (tmp == 5):
            # TFunction: not representable in JSON.
            self.buf.b.write("\"<fun>\"")
        elif (tmp == 6):
            # TClass: dispatch on the concrete class.
            c = _g.params[0]
            if (c == str):
                self.quote(v)
            elif (c == list):
                # Arrays: chr(91)/chr(93) are '[' and ']', chr(44) is ','.
                v1 = v
                _this = self.buf
                s = "".join(map(chr,[91]))
                _this.b.write(s)
                _hx_len = len(v1)
                last = (_hx_len - 1)
                _g1 = 0
                _g2 = _hx_len
                while (_g1 < _g2):
                    i = _g1
                    _g1 = (_g1 + 1)
                    if (i > 0):
                        _this = self.buf
                        s = "".join(map(chr,[44]))
                        _this.b.write(s)
                    else:
                        # First element: increase nesting depth (nind += 1).
                        _hx_local_0 = self
                        _hx_local_1 = _hx_local_0.nind
                        _hx_local_0.nind = (_hx_local_1 + 1)
                        _hx_local_1
                    if self.pretty:
                        _this1 = self.buf
                        s1 = "".join(map(chr,[10]))
                        _this1.b.write(s1)
                    if self.pretty:
                        v2 = StringTools.lpad("",self.indent,(self.nind * len(self.indent)))
                        _this2 = self.buf
                        s2 = Std.string(v2)
                        _this2.b.write(s2)
                    self.write(i,(v1[i] if i >= 0 and i < len(v1) else None))
                    if (i == last):
                        # Last element: restore depth and close out the line.
                        _hx_local_2 = self
                        _hx_local_3 = _hx_local_2.nind
                        _hx_local_2.nind = (_hx_local_3 - 1)
                        _hx_local_3
                        if self.pretty:
                            _this3 = self.buf
                            s3 = "".join(map(chr,[10]))
                            _this3.b.write(s3)
                        if self.pretty:
                            v3 = StringTools.lpad("",self.indent,(self.nind * len(self.indent)))
                            _this4 = self.buf
                            s4 = Std.string(v3)
                            _this4.b.write(s4)
                _this = self.buf
                s = "".join(map(chr,[93]))
                _this.b.write(s)
            elif (c == haxe_ds_StringMap):
                # StringMap: copy entries onto an anonymous object (applying
                # the Haxe "_hx_" field-name mangling), then serialize that.
                v1 = v
                o = _hx_AnonObject({})
                k = v1.keys()
                while k.hasNext():
                    k1 = k.next()
                    value = v1.h.get(k1,None)
                    setattr(o,(("_hx_" + k1) if ((k1 in python_Boot.keywords)) else (("_hx_" + k1) if (((((len(k1) > 2) and ((ord(k1[0]) == 95))) and ((ord(k1[1]) == 95))) and ((ord(k1[(len(k1) - 1)]) != 95)))) else k1)),value)
                v1 = o
                self.fieldsString(v1,python_Boot.fields(v1))
            elif (c == Date):
                v1 = v
                self.quote(v1.toString())
            else:
                # Any other class instance: serialize its instance fields.
                self.classString(v)
        elif (tmp == 7):
            # TEnum: emit the constructor index as a number.
            _g1 = _g.params[0]
            i = v.index
            _this = self.buf
            s = Std.string(i)
            _this.b.write(s)
        elif (tmp == 8):
            # TUnknown
            self.buf.b.write("\"???\"")
        else:
            pass
    def classString(self,v):
        # Serialize a class instance via its reflected instance fields.
        self.fieldsString(v,python_Boot.getInstanceFields(Type.getClass(v)))
    def fieldsString(self,v,fields):
        # Emit `{ "field": value, ... }` for the given object and field list,
        # skipping function-valued fields. chr(123)/chr(125) are '{' and '}'.
        _this = self.buf
        s = "".join(map(chr,[123]))
        _this.b.write(s)
        _hx_len = len(fields)
        last = (_hx_len - 1)
        first = True
        _g = 0
        _g1 = _hx_len
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            f = (fields[i] if i >= 0 and i < len(fields) else None)
            value = Reflect.field(v,f)
            if Reflect.isFunction(value):
                continue
            if first:
                # First emitted field: bump nesting depth (nind += 1).
                _hx_local_0 = self
                _hx_local_1 = _hx_local_0.nind
                _hx_local_0.nind = (_hx_local_1 + 1)
                _hx_local_1
                first = False
            else:
                _this = self.buf
                s = "".join(map(chr,[44]))
                _this.b.write(s)
            if self.pretty:
                _this1 = self.buf
                s1 = "".join(map(chr,[10]))
                _this1.b.write(s1)
            if self.pretty:
                v1 = StringTools.lpad("",self.indent,(self.nind * len(self.indent)))
                _this2 = self.buf
                s2 = Std.string(v1)
                _this2.b.write(s2)
            self.quote(f)
            _this3 = self.buf
            s3 = "".join(map(chr,[58]))
            _this3.b.write(s3)
            if self.pretty:
                _this4 = self.buf
                s4 = "".join(map(chr,[32]))
                _this4.b.write(s4)
            self.write(f,value)
            if (i == last):
                # Last field written: restore depth before the closing brace.
                _hx_local_2 = self
                _hx_local_3 = _hx_local_2.nind
                _hx_local_2.nind = (_hx_local_3 - 1)
                _hx_local_3
                if self.pretty:
                    _this5 = self.buf
                    s5 = "".join(map(chr,[10]))
                    _this5.b.write(s5)
                if self.pretty:
                    v2 = StringTools.lpad("",self.indent,(self.nind * len(self.indent)))
                    _this6 = self.buf
                    s6 = Std.string(v2)
                    _this6.b.write(s6)
        _this = self.buf
        s = "".join(map(chr,[125]))
        _this.b.write(s)
    def quote(self,s):
        # Emit `s` as a double-quoted JSON string, escaping control, quote
        # and backslash characters; other characters are written verbatim.
        _this = self.buf
        s1 = "".join(map(chr,[34]))
        _this.b.write(s1)
        i = 0
        length = len(s)
        while (i < length):
            index = i
            i = (i + 1)
            c = ord(s[index])
            c1 = c
            if (c1 == 8):
                self.buf.b.write("\\b")
            elif (c1 == 9):
                self.buf.b.write("\\t")
            elif (c1 == 10):
                self.buf.b.write("\\n")
            elif (c1 == 12):
                self.buf.b.write("\\f")
            elif (c1 == 13):
                self.buf.b.write("\\r")
            elif (c1 == 34):
                self.buf.b.write("\\\"")
            elif (c1 == 92):
                self.buf.b.write("\\\\")
            else:
                _this = self.buf
                s1 = "".join(map(chr,[c]))
                _this.b.write(s1)
        _this = self.buf
        s = "".join(map(chr,[34]))
        _this.b.write(s)
    @staticmethod
    def print(o,replacer = None,space = None):
        # Entry point: serialize `o` and return the resulting JSON string.
        printer = haxe_format_JsonPrinter(replacer,space)
        printer.write("",o)
        return printer.buf.b.getvalue()
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.buf = None
        _hx_o.replacer = None
        _hx_o.indent = None
        _hx_o.pretty = None
        _hx_o.nind = None
# Register the class in the Haxe runtime metadata tables.
haxe_format_JsonPrinter._hx_class = haxe_format_JsonPrinter
_hx_classes["haxe.format.JsonPrinter"] = haxe_format_JsonPrinter
class haxe_io_ArrayBufferViewImpl:
    """A (bytes, offset, length) window over a haxe.io.Bytes buffer."""
    _hx_class_name = "haxe.io.ArrayBufferViewImpl"
    _hx_is_interface = "False"
    __slots__ = ("bytes", "byteOffset", "byteLength")
    _hx_fields = ["bytes", "byteOffset", "byteLength"]
    _hx_methods = ["sub", "subarray"]

    def __init__(self, _hx_bytes, pos, length):
        self.bytes = _hx_bytes
        self.byteOffset = pos
        self.byteLength = length

    def sub(self, begin, length = None):
        """Return a new view of `length` bytes starting at `begin`
        (relative to this view); `length` defaults to the remainder."""
        if length is None:
            length = (self.byteLength - begin)
        out_of_range = ((begin < 0) or (length < 0) or ((begin + length) > self.byteLength))
        if out_of_range:
            raise haxe_Exception.thrown(haxe_io_Error.OutsideBounds)
        return haxe_io_ArrayBufferViewImpl(self.bytes, (self.byteOffset + begin), length)

    def subarray(self, begin = None, end = None):
        """TypedArray-style slice covering [begin, end) of this view."""
        if begin is None:
            begin = 0
        if end is None:
            # NOTE(review): default `end` mirrors the generated Haxe output
            # (byteLength - begin), not byteLength -- verify upstream intent.
            end = (self.byteLength - begin)
        return self.sub(begin, (end - begin))

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.bytes = None
        _hx_o.byteOffset = None
        _hx_o.byteLength = None
# Register the class in the Haxe runtime metadata tables.
haxe_io_ArrayBufferViewImpl._hx_class = haxe_io_ArrayBufferViewImpl
_hx_classes["haxe.io.ArrayBufferViewImpl"] = haxe_io_ArrayBufferViewImpl
class haxe_io__ArrayBufferView_ArrayBufferView_Impl_:
    """Static factory for the ArrayBufferView abstract."""
    _hx_class_name = "haxe.io._ArrayBufferView.ArrayBufferView_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["fromBytes"]

    @staticmethod
    def fromBytes(_hx_bytes, pos = None, length = None):
        """Wrap `_hx_bytes` in a view over [pos, pos+length); defaults
        cover the whole buffer. Raises OutsideBounds on a bad range."""
        if pos is None:
            pos = 0
        if length is None:
            length = (_hx_bytes.length - pos)
        if ((pos < 0) or (length < 0) or ((pos + length) > _hx_bytes.length)):
            raise haxe_Exception.thrown(haxe_io_Error.OutsideBounds)
        return haxe_io_ArrayBufferViewImpl(_hx_bytes, pos, length)


haxe_io__ArrayBufferView_ArrayBufferView_Impl_._hx_class = haxe_io__ArrayBufferView_ArrayBufferView_Impl_
_hx_classes["haxe.io._ArrayBufferView.ArrayBufferView_Impl_"] = haxe_io__ArrayBufferView_ArrayBufferView_Impl_
class haxe_io_BytesBuffer:
    """Growable byte accumulator; getBytes() finalizes it into haxe.io.Bytes."""
    _hx_class_name = "haxe.io.BytesBuffer"
    _hx_is_interface = "False"
    __slots__ = ("b",)
    _hx_fields = ["b"]
    _hx_methods = ["getBytes"]

    def __init__(self):
        # Raw accumulation buffer.
        self.b = bytearray()

    def getBytes(self):
        """Wrap the accumulated bytes; the buffer is unusable afterwards."""
        result = haxe_io_Bytes(len(self.b), self.b)
        # Ownership of the bytearray moves to the returned Bytes value.
        self.b = None
        return result

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.b = None
# Register the class in the Haxe runtime metadata tables.
haxe_io_BytesBuffer._hx_class = haxe_io_BytesBuffer
_hx_classes["haxe.io.BytesBuffer"] = haxe_io_BytesBuffer
class haxe_io_Input:
    # Marker base class for haxe.io input streams; no methods are emitted
    # here because none were used by the compiled program.
    _hx_class_name = "haxe.io.Input"
    _hx_is_interface = "False"
    __slots__ = ()
# Register the class in the Haxe runtime metadata tables.
haxe_io_Input._hx_class = haxe_io_Input
_hx_classes["haxe.io.Input"] = haxe_io_Input
class haxe_io_Output:
    # Marker base class for haxe.io output streams; no methods are emitted
    # here because none were used by the compiled program.
    _hx_class_name = "haxe.io.Output"
    _hx_is_interface = "False"
    __slots__ = ()
# Register the class in the Haxe runtime metadata tables.
haxe_io_Output._hx_class = haxe_io_Output
_hx_classes["haxe.io.Output"] = haxe_io_Output
class haxe_io_Encoding(Enum):
    # Haxe enum of string encodings. NOTE: `Enum` here is the Haxe runtime
    # base class generated elsewhere in this file, not Python's enum.Enum.
    __slots__ = ()
    _hx_class_name = "haxe.io.Encoding"
    _hx_constructs = ["UTF8", "RawNative"]
# Parameterless constructors are pre-instantiated singletons.
haxe_io_Encoding.UTF8 = haxe_io_Encoding("UTF8", 0, ())
haxe_io_Encoding.RawNative = haxe_io_Encoding("RawNative", 1, ())
# Register the class in the Haxe runtime metadata tables.
haxe_io_Encoding._hx_class = haxe_io_Encoding
_hx_classes["haxe.io.Encoding"] = haxe_io_Encoding
class haxe_io_Error(Enum):
    # Haxe enum of I/O error conditions. NOTE: `Enum` here is the Haxe
    # runtime base class, not Python's enum.Enum.
    _hx_class_name = "haxe.io.Error"
    _hx_constructs = ["Blocked", "Overflow", "OutsideBounds", "Custom"]
    @staticmethod
    def Custom(e):
        # Parameterized constructor: wraps an arbitrary error value.
        return haxe_io_Error("Custom", 3, (e,))
# Parameterless constructors are pre-instantiated singletons.
haxe_io_Error.Blocked = haxe_io_Error("Blocked", 0, ())
haxe_io_Error.Overflow = haxe_io_Error("Overflow", 1, ())
haxe_io_Error.OutsideBounds = haxe_io_Error("OutsideBounds", 2, ())
# Register the class in the Haxe runtime metadata tables.
haxe_io_Error._hx_class = haxe_io_Error
_hx_classes["haxe.io.Error"] = haxe_io_Error
class haxe_io__UInt16Array_UInt16Array_Impl_:
    # Static helpers for the UInt16Array abstract (little-endian 16-bit view
    # over a haxe.io.Bytes buffer).
    _hx_class_name = "haxe.io._UInt16Array.UInt16Array_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["fromArray"]
    @staticmethod
    def fromArray(a,pos = None,length = None):
        # Copy `length` ints from `a`, starting at `pos`, into a freshly
        # allocated little-endian 16-bit buffer view.
        if (pos is None):
            pos = 0
        if (length is None):
            length = (len(a) - pos)
        if (((pos < 0) or ((length < 0))) or (((pos + length) > len(a)))):
            raise haxe_Exception.thrown(haxe_io_Error.OutsideBounds)
        # NOTE(review): allocates len(a)*2 bytes but copies only `length`
        # elements, leaving trailing zeros when pos > 0 -- verify upstream.
        size = (len(a) * 2)
        this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(size),0,size)
        this2 = this1
        i = this2
        _g = 0
        _g1 = length
        while (_g < _g1):
            idx = _g
            _g = (_g + 1)
            value = python_internal_ArrayImpl._get(a, (idx + pos))
            # Bounds guard: the view holds byteLength >> 1 16-bit slots.
            if ((idx >= 0) and ((idx < ((i.byteLength >> 1))))):
                _this = i.bytes
                pos1 = (((idx << 1)) + i.byteOffset)
                # Little-endian store: low byte first.
                _this.b[pos1] = (value & 255)
                _this.b[(pos1 + 1)] = ((value >> 8) & 255)
        return i
# Register the class in the Haxe runtime metadata tables.
haxe_io__UInt16Array_UInt16Array_Impl_._hx_class = haxe_io__UInt16Array_UInt16Array_Impl_
_hx_classes["haxe.io._UInt16Array.UInt16Array_Impl_"] = haxe_io__UInt16Array_UInt16Array_Impl_
class haxe_io__UInt8Array_UInt8Array_Impl_:
    """Static helpers for the UInt8Array abstract."""
    _hx_class_name = "haxe.io._UInt8Array.UInt8Array_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["fromData", "fromBytes"]

    @staticmethod
    def fromData(d):
        """A UInt8Array is its underlying ArrayBufferView data; pass through."""
        return d

    @staticmethod
    def fromBytes(_hx_bytes, bytePos = None, length = None):
        """View `_hx_bytes` as a UInt8Array starting at `bytePos`."""
        if bytePos is None:
            bytePos = 0
        view = haxe_io__ArrayBufferView_ArrayBufferView_Impl_.fromBytes(_hx_bytes, bytePos, length)
        return haxe_io__UInt8Array_UInt8Array_Impl_.fromData(view)
# Register the class in the Haxe runtime metadata tables.
haxe_io__UInt8Array_UInt8Array_Impl_._hx_class = haxe_io__UInt8Array_UInt8Array_Impl_
_hx_classes["haxe.io._UInt8Array.UInt8Array_Impl_"] = haxe_io__UInt8Array_UInt8Array_Impl_
class haxe_iterators_ArrayIterator:
    """Haxe-style iterator (hasNext/next) over a Python list."""
    _hx_class_name = "haxe.iterators.ArrayIterator"
    _hx_is_interface = "False"
    __slots__ = ("array", "current")
    _hx_fields = ["array", "current"]
    _hx_methods = ["hasNext", "next"]

    def __init__(self, array):
        self.current = 0
        self.array = array

    def hasNext(self):
        """True while elements remain."""
        return (self.current < len(self.array))

    def next(self):
        """Return the element at the cursor, then advance."""
        index = self.current
        self.current = (index + 1)
        return python_internal_ArrayImpl._get(self.array, index)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.array = None
        _hx_o.current = None
# Register the class in the Haxe runtime metadata tables.
haxe_iterators_ArrayIterator._hx_class = haxe_iterators_ArrayIterator
_hx_classes["haxe.iterators.ArrayIterator"] = haxe_iterators_ArrayIterator
class haxe_iterators_ArrayKeyValueIterator:
    """Haxe-style key/value iterator over a Python list: yields anonymous
    objects with `key` (index) and `value` (element) fields."""
    _hx_class_name = "haxe.iterators.ArrayKeyValueIterator"
    _hx_is_interface = "False"
    __slots__ = ("current", "array")
    _hx_fields = ["current", "array"]
    _hx_methods = ["hasNext", "next"]

    def __init__(self, array):
        self.current = 0
        self.array = array

    def hasNext(self):
        """True while elements remain."""
        return (self.current < len(self.array))

    def next(self):
        """Return {key, value} for the cursor position, then advance."""
        index = self.current
        self.current = (index + 1)
        return _hx_AnonObject({'value': python_internal_ArrayImpl._get(self.array, index), 'key': index})

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.current = None
        _hx_o.array = None
# Register the class in the Haxe runtime metadata tables.
haxe_iterators_ArrayKeyValueIterator._hx_class = haxe_iterators_ArrayKeyValueIterator
_hx_classes["haxe.iterators.ArrayKeyValueIterator"] = haxe_iterators_ArrayKeyValueIterator
class hx_concurrent_ConcurrentException:
    """Wraps an exception caught elsewhere (e.g. another thread) so it can
    be rethrown later along with the stack trace captured at wrap time."""
    _hx_class_name = "hx.concurrent.ConcurrentException"
    _hx_is_interface = "False"
    __slots__ = ("cause", "causeStackTrace")
    _hx_fields = ["cause", "causeStackTrace"]
    _hx_methods = ["rethrow", "toString"]

    def __init__(self, cause):
        self.cause = cause
        # Snapshot the stack of the exception being wrapped.
        self.causeStackTrace = haxe__CallStack_CallStack_Impl_.exceptionStack()

    def rethrow(self):
        """Raise a fresh Exception carrying the formatted cause report."""
        raise Exception(self.toString()) from None

    def toString(self):
        """Render the cause and its captured stack trace as a framed report."""
        report = python_lib_io_StringIO()
        report.write("rethrown exception:\n")
        report.write(" ")
        report.write("--------------------\n")
        report.write(" ")
        report.write("| Exception : ")
        report.write(Std.string(self.cause))
        report.write("\n")
        trace_text = haxe__CallStack_CallStack_Impl_.toString(self.causeStackTrace)
        for line in trace_text.split("\n"):
            if (line == ""):
                continue
            report.write(" ")
            report.write(Std.string(StringTools.replace(line,"Called from","| at")))
            report.write("\n")
        report.write(" ")
        report.write("--------------------")
        return report.getvalue()

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.cause = None
        _hx_o.causeStackTrace = None


hx_concurrent_ConcurrentException._hx_class = hx_concurrent_ConcurrentException
_hx_classes["hx.concurrent.ConcurrentException"] = hx_concurrent_ConcurrentException
class hx_concurrent_Future:
    # Marker interface for future values (see AbstractFuture for the base
    # implementation).
    _hx_class_name = "hx.concurrent.Future"
    _hx_is_interface = "True"
    __slots__ = ()
# Register the class in the Haxe runtime metadata tables.
hx_concurrent_Future._hx_class = hx_concurrent_Future
_hx_classes["hx.concurrent.Future"] = hx_concurrent_Future
class hx_concurrent_FutureResult(Enum):
    # Haxe enum describing a future's outcome. NOTE: `Enum` is the Haxe
    # runtime base class, not Python's enum.Enum.
    __slots__ = ()
    _hx_class_name = "hx.concurrent.FutureResult"
    _hx_constructs = ["VALUE", "FAILURE", "PENDING"]
    @staticmethod
    def VALUE(result,time,future):
        # Successful completion: result value, completion time (ms), source future.
        return hx_concurrent_FutureResult("VALUE", 0, (result,time,future))
    @staticmethod
    def FAILURE(ex,time,future):
        # Failed completion: exception, completion time (ms), source future.
        return hx_concurrent_FutureResult("FAILURE", 1, (ex,time,future))
    @staticmethod
    def PENDING(future):
        # Not yet completed.
        return hx_concurrent_FutureResult("PENDING", 2, (future,))
# Register the class in the Haxe runtime metadata tables.
hx_concurrent_FutureResult._hx_class = hx_concurrent_FutureResult
_hx_classes["hx.concurrent.FutureResult"] = hx_concurrent_FutureResult
class hx_concurrent_AbstractFuture:
    """Base future: holds the (initially PENDING) result, a reentrant lock,
    and the list of completion listeners."""
    _hx_class_name = "hx.concurrent.AbstractFuture"
    _hx_is_interface = "False"
    __slots__ = ("completionListeners", "sync", "result")
    _hx_fields = ["completionListeners", "sync", "result"]
    _hx_methods = ["isComplete"]
    _hx_interfaces = [hx_concurrent_Future]

    def __init__(self):
        self.result = None
        self.sync = hx_concurrent_lock_RLock()
        self.completionListeners = list()
        # Every future starts out pending.
        self.result = hx_concurrent_FutureResult.PENDING(self)

    def isComplete(self):
        """True once the result is no longer PENDING (enum index 2)."""
        return (self.result.index != 2)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.completionListeners = None
        _hx_o.sync = None
        _hx_o.result = None


hx_concurrent_AbstractFuture._hx_class = hx_concurrent_AbstractFuture
_hx_classes["hx.concurrent.AbstractFuture"] = hx_concurrent_AbstractFuture
class hx_concurrent_CompletableFuture(hx_concurrent_AbstractFuture):
    # Future whose result is set explicitly via complete(); listeners are
    # notified under the future's lock.
    _hx_class_name = "hx.concurrent.CompletableFuture"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = ["complete"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = hx_concurrent_AbstractFuture
    def __init__(self):
        super().__init__()
    def complete(self,result,overwriteResult = None):
        # Set the future's result (a FutureResult enum) and notify all
        # completion listeners. Returns True if the result was stored,
        # False when the future was already complete and overwriteResult
        # is False. Runs entirely inside the future's lock.
        if (overwriteResult is None):
            overwriteResult = False
        _gthis = self
        def _hx_local_2():
            def _hx_local_1():
                if (overwriteResult or (not _gthis.isComplete())):
                    _g = result
                    tmp = _g.index
                    if (tmp == 0):
                        # VALUE: stamp with the current wall-clock time in ms.
                        value = _g.params[0]
                        tmp = (python_lib_Time.time() * 1000)
                        _gthis.result = hx_concurrent_FutureResult.VALUE(value,tmp,_gthis)
                    elif (tmp == 1):
                        # FAILURE: stamp with the current wall-clock time in ms.
                        ex = _g.params[0]
                        tmp = (python_lib_Time.time() * 1000)
                        _gthis.result = hx_concurrent_FutureResult.FAILURE(ex,tmp,_gthis)
                    else:
                        pass
                    # Notify listeners; a failing listener is logged, not propagated.
                    _g = 0
                    _g1 = _gthis.completionListeners
                    while (_g < len(_g1)):
                        listener = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                        _g = (_g + 1)
                        try:
                            listener(_gthis.result)
                        except BaseException as _g2:
                            ex = haxe_Exception.caught(_g2)
                            haxe_Log.trace(ex,_hx_AnonObject({'fileName': "hx/concurrent/Future.hx", 'lineNumber': 117, 'className': "hx.concurrent.CompletableFuture", 'methodName': "complete"}))
                    return True
                return False
            return self.sync.execute(_hx_local_1)
        return _hx_local_2()
    @staticmethod
    def _hx_empty_init(_hx_o): pass
# Register the class in the Haxe runtime metadata tables.
hx_concurrent_CompletableFuture._hx_class = hx_concurrent_CompletableFuture
_hx_classes["hx.concurrent.CompletableFuture"] = hx_concurrent_CompletableFuture
class hx_concurrent_Service:
    # Marker interface for lifecycle-managed services (see ServiceBase).
    _hx_class_name = "hx.concurrent.Service"
    _hx_is_interface = "True"
    __slots__ = ()
# Register the class in the Haxe runtime metadata tables.
hx_concurrent_Service._hx_class = hx_concurrent_Service
_hx_classes["hx.concurrent.Service"] = hx_concurrent_Service
class hx_concurrent_ServiceState(Enum):
    # Haxe enum of service lifecycle states. NOTE: `Enum` is the Haxe
    # runtime base class, not Python's enum.Enum.
    __slots__ = ()
    _hx_class_name = "hx.concurrent.ServiceState"
    _hx_constructs = ["STARTING", "RUNNING", "STOPPING", "STOPPED"]
# Parameterless constructors are pre-instantiated singletons.
hx_concurrent_ServiceState.STARTING = hx_concurrent_ServiceState("STARTING", 0, ())
hx_concurrent_ServiceState.RUNNING = hx_concurrent_ServiceState("RUNNING", 1, ())
hx_concurrent_ServiceState.STOPPING = hx_concurrent_ServiceState("STOPPING", 2, ())
hx_concurrent_ServiceState.STOPPED = hx_concurrent_ServiceState("STOPPED", 3, ())
# Register the class in the Haxe runtime metadata tables.
hx_concurrent_ServiceState._hx_class = hx_concurrent_ServiceState
_hx_classes["hx.concurrent.ServiceState"] = hx_concurrent_ServiceState
class hx_concurrent_ServiceBase:
    # Base implementation of a lifecycle-managed service: a state machine
    # (STARTING/RUNNING/STOPPING/STOPPED) guarded by a reentrant lock.
    _hx_class_name = "hx.concurrent.ServiceBase"
    _hx_is_interface = "False"
    __slots__ = ("id", "state", "_stateLock")
    _hx_fields = ["id", "state", "_stateLock"]
    _hx_methods = ["set_state", "start", "onStart", "toString"]
    _hx_statics = ["_ids"]
    _hx_interfaces = [hx_concurrent_Service]
    def __init__(self):
        self._stateLock = hx_concurrent_lock_RLock()
        self.state = hx_concurrent_ServiceState.STOPPED
        # Unique per-service id drawn from a shared atomic counter.
        self.id = hx_concurrent_ServiceBase._ids.incrementAndGet()
        haxe_Log.trace((("[" + Std.string(self)) + "] instantiated."),_hx_AnonObject({'fileName': "hx/concurrent/Service.hx", 'lineNumber': 53, 'className': "hx.concurrent.ServiceBase", 'methodName': "new"}))
    def set_state(self,s):
        # Log the transition being applied, then store the new state.
        tmp = s.index
        if (tmp == 0):
            haxe_Log.trace((("[" + Std.string(self)) + "] is starting..."),_hx_AnonObject({'fileName': "hx/concurrent/Service.hx", 'lineNumber': 42, 'className': "hx.concurrent.ServiceBase", 'methodName': "set_state"}))
        elif (tmp == 1):
            haxe_Log.trace((("[" + Std.string(self)) + "] is running."),_hx_AnonObject({'fileName': "hx/concurrent/Service.hx", 'lineNumber': 43, 'className': "hx.concurrent.ServiceBase", 'methodName': "set_state"}))
        elif (tmp == 2):
            haxe_Log.trace((("[" + Std.string(self)) + "] is stopping..."),_hx_AnonObject({'fileName': "hx/concurrent/Service.hx", 'lineNumber': 44, 'className': "hx.concurrent.ServiceBase", 'methodName': "set_state"}))
        elif (tmp == 3):
            haxe_Log.trace((("[" + Std.string(self)) + "] is stopped."),_hx_AnonObject({'fileName': "hx/concurrent/Service.hx", 'lineNumber': 45, 'className': "hx.concurrent.ServiceBase", 'methodName': "set_state"}))
        else:
            pass
        def _hx_local_1():
            def _hx_local_0():
                self.state = s
                return self.state
            return _hx_local_0()
        return _hx_local_1()
    def start(self):
        # Start the service when stopped; no-op while starting or running;
        # raises while a stop is still in progress. Runs under the state lock.
        _gthis = self
        def _hx_local_0():
            tmp = _gthis.state.index
            if (tmp == 0):
                pass
            elif (tmp == 1):
                pass
            elif (tmp == 2):
                raise haxe_Exception.thrown((("Service [" + Std.string(_gthis)) + "] is currently stopping!"))
            elif (tmp == 3):
                _gthis.set_state(hx_concurrent_ServiceState.STARTING)
                _gthis.onStart()
                _gthis.set_state(hx_concurrent_ServiceState.RUNNING)
            else:
                pass
        self._stateLock.execute(_hx_local_0)
    def onStart(self):
        # Hook for subclasses; the default implementation does nothing.
        pass
    def toString(self):
        # "<ClassName>#<id>"
        return ((HxOverrides.stringOrNull(Type.getClassName(Type.getClass(self))) + "#") + Std.string(self.id))
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.id = None
        _hx_o.state = None
        _hx_o._stateLock = None
# Register the class in the Haxe runtime metadata tables.
hx_concurrent_ServiceBase._hx_class = hx_concurrent_ServiceBase
_hx_classes["hx.concurrent.ServiceBase"] = hx_concurrent_ServiceBase
class hx_concurrent_collection__CopyOnWriteArray_CopyOnWriteArray_Impl_:
    """Static factory for the CopyOnWriteArray abstract."""
    _hx_class_name = "hx.concurrent.collection._CopyOnWriteArray.CopyOnWriteArray_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["_new"]

    @staticmethod
    def _new(initialValues = None):
        """Create a copy-on-write array, optionally seeded with `initialValues`."""
        array = hx_concurrent_collection__CopyOnWriteArray_CopyOnWriteArrayImpl()
        if initialValues is not None:
            array.addAll(initialValues)
        return array


hx_concurrent_collection__CopyOnWriteArray_CopyOnWriteArray_Impl_._hx_class = hx_concurrent_collection__CopyOnWriteArray_CopyOnWriteArray_Impl_
_hx_classes["hx.concurrent.collection._CopyOnWriteArray.CopyOnWriteArray_Impl_"] = hx_concurrent_collection__CopyOnWriteArray_CopyOnWriteArray_Impl_
class hx_concurrent_collection__CopyOnWriteArray_CopyOnWriteArrayImpl:
    # Copy-on-write array: mutation builds a new list under the lock and
    # swaps it in, so iterators over the old list are never disturbed.
    _hx_class_name = "hx.concurrent.collection._CopyOnWriteArray.CopyOnWriteArrayImpl"
    _hx_is_interface = "False"
    __slots__ = ("_items", "_sync")
    _hx_fields = ["_items", "_sync"]
    _hx_methods = ["addAll", "iterator"]
    _hx_interfaces = [hx_concurrent_collection_OrderedCollection]
    def __init__(self):
        self._sync = hx_concurrent_lock_RLock()
        self._items = list()
    def addAll(self,coll):
        # Append all elements of `coll` (a Haxe enum discriminating between
        # a Collection, a plain array, or a haxe.ds.List) by building a new
        # backing list and swapping it in under the lock.
        _gthis = self
        def _hx_local_0():
            items = None
            _g = coll
            tmp = _g.index
            if (tmp == 0):
                # Collection: drain its Haxe iterator.
                coll1 = _g.params[0]
                items = list(_gthis._items)
                i = coll1.iterator()
                while i.hasNext():
                    i1 = i.next()
                    items.append(i1)
            elif (tmp == 1):
                # Plain array: list concatenation copies implicitly.
                arr = _g.params[0]
                items = (_gthis._items + arr)
            elif (tmp == 2):
                # haxe.ds.List: walk the linked nodes.
                _hx_list = _g.params[0]
                items = list(_gthis._items)
                _g_head = _hx_list.h
                while (_g_head is not None):
                    val = _g_head.item
                    _g_head = _g_head.next
                    i = val
                    items.append(i)
            else:
                pass
            # Atomic swap of the backing list.
            _gthis._items = items
        self._sync.execute(_hx_local_0)
    def iterator(self):
        # Iterates a snapshot: later swaps of _items do not affect it.
        return haxe_iterators_ArrayIterator(self._items)
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._items = None
        _hx_o._sync = None
# Register the class in the Haxe runtime metadata tables.
hx_concurrent_collection__CopyOnWriteArray_CopyOnWriteArrayImpl._hx_class = hx_concurrent_collection__CopyOnWriteArray_CopyOnWriteArrayImpl
_hx_classes["hx.concurrent.collection._CopyOnWriteArray.CopyOnWriteArrayImpl"] = hx_concurrent_collection__CopyOnWriteArray_CopyOnWriteArrayImpl
class hx_concurrent_collection_Queue:
    """Unbounded FIFO queue backed by collections.deque, with an atomic
    length counter; push appends left, pop removes from the right."""
    _hx_class_name = "hx.concurrent.collection.Queue"
    _hx_is_interface = "False"
    __slots__ = ("_queue", "_length")
    _hx_fields = ["_queue", "_length"]
    _hx_methods = ["pop", "push"]

    def __init__(self):
        # Atomic counter tracking the number of queued items.
        self._length = hx_concurrent_atomic__AtomicInt_AtomicIntImpl(0)
        import collections
        self._queue = collections.deque()

    def pop(self, timeoutMS = None):
        """Dequeue the oldest item, or None when nothing is available.

        timeoutMS == 0 returns immediately; > 0 polls for up to that many
        milliseconds; -1 waits indefinitely. Raises for values below -1.
        """
        if timeoutMS is None:
            timeoutMS = 0
        if timeoutMS < -1:
            raise haxe_Exception.thrown("[timeoutMS] must be >= -1")
        msg = None
        if timeoutMS == 0:
            try:
                msg = Reflect.field(self._queue, "pop")()
            except BaseException as _g:
                # Empty deque: treat as "nothing available".
                msg = None
        else:
            def _try_pop():
                nonlocal msg
                try:
                    msg = Reflect.field(self._queue, "pop")()
                except BaseException as _g:
                    msg = None
                return (msg is not None)
            hx_concurrent_thread_Threads._hx_await(_try_pop, timeoutMS)
        if msg is not None:
            self._length.getAndIncrement(-1)
        return msg

    def push(self, msg):
        """Enqueue `msg` (popped after all earlier items); None is rejected."""
        if msg is None:
            raise haxe_Exception.thrown("[msg] must not be null")
        Reflect.field(self._queue, "appendleft")(msg)
        self._length.getAndIncrement()

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._queue = None
        _hx_o._length = None


hx_concurrent_collection_Queue._hx_class = hx_concurrent_collection_Queue
_hx_classes["hx.concurrent.collection.Queue"] = hx_concurrent_collection_Queue
class hx_concurrent_executor_Schedule(Enum):
    # Haxe enum describing when/how often a task runs. NOTE: `Enum` is the
    # Haxe runtime base class, not Python's enum.Enum. Omitted (None)
    # fields are defaulted later by ScheduleTools.applyDefaults.
    __slots__ = ()
    _hx_class_name = "hx.concurrent.executor.Schedule"
    _hx_constructs = ["ONCE", "FIXED_DELAY", "FIXED_RATE", "HOURLY", "DAILY", "WEEKLY"]
    @staticmethod
    def ONCE(initialDelayMS = None):
        # Run a single time after the given delay (ms).
        return hx_concurrent_executor_Schedule("ONCE", 0, (initialDelayMS,))
    @staticmethod
    def FIXED_DELAY(intervalMS,initialDelayMS = None):
        # Re-run intervalMS after each completion.
        return hx_concurrent_executor_Schedule("FIXED_DELAY", 1, (intervalMS,initialDelayMS))
    @staticmethod
    def FIXED_RATE(intervalMS,initialDelayMS = None):
        # Re-run every intervalMS, measured start-to-start.
        return hx_concurrent_executor_Schedule("FIXED_RATE", 2, (intervalMS,initialDelayMS))
    @staticmethod
    def HOURLY(minute = None,second= None):
        # Run once per hour at the given minute/second.
        return hx_concurrent_executor_Schedule("HOURLY", 3, (minute,second))
    @staticmethod
    def DAILY(hour = None,minute= None,second= None):
        # Run once per day at the given time.
        return hx_concurrent_executor_Schedule("DAILY", 4, (hour,minute,second))
    @staticmethod
    def WEEKLY(day = None,hour= None,minute= None,second= None):
        # Run once per week on the given day and time.
        return hx_concurrent_executor_Schedule("WEEKLY", 5, (day,hour,minute,second))
# Register the class in the Haxe runtime metadata tables.
hx_concurrent_executor_Schedule._hx_class = hx_concurrent_executor_Schedule
_hx_classes["hx.concurrent.executor.Schedule"] = hx_concurrent_executor_Schedule
class hx_concurrent_executor_Executor(hx_concurrent_ServiceBase):
    """Base class for task executors: fans task results out to registered
    completion listeners and picks a platform-appropriate implementation
    in create()."""
    _hx_class_name = "hx.concurrent.executor.Executor"
    _hx_is_interface = "False"
    __slots__ = ("completionListeners",)
    _hx_fields = ["completionListeners"]
    _hx_methods = ["notifyResult", "submit"]
    _hx_statics = ["NOW_ONCE", "create"]
    _hx_interfaces = []
    _hx_super = hx_concurrent_ServiceBase

    def __init__(self):
        # Listener list is created before the base constructor runs,
        # matching the generated initialization order.
        self.completionListeners = hx_concurrent_collection__CopyOnWriteArray_CopyOnWriteArray_Impl_._new()
        super().__init__()

    def notifyResult(self, result):
        """Invoke every completion listener with `result`; listener errors
        are logged. A FAILURE result with no listeners is logged so the
        exception is not silently lost."""
        for handler in self.completionListeners._items:
            try:
                handler(result)
            except BaseException as _g:
                ex = haxe_Exception.caught(_g)
                haxe_Log.trace(ex,_hx_AnonObject({'fileName': "hx/concurrent/executor/Executor.hx", 'lineNumber': 49, 'className': "hx.concurrent.executor.Executor", 'methodName': "notifyResult"}))
        if (len(self.completionListeners._items) == 0):
            # FutureResult.FAILURE has enum index 1; params[0] is the exception.
            if (result.index == 1):
                ex = result.params[0]
                haxe_Log.trace(ex,_hx_AnonObject({'fileName': "hx/concurrent/executor/Executor.hx", 'lineNumber': 52, 'className': "hx.concurrent.executor.Executor", 'methodName': "notifyResult"}))

    @staticmethod
    def create(maxConcurrent = None, autostart = None):
        """Build the best executor for the platform: a thread pool when
        threading is supported, otherwise a timer-based executor."""
        if maxConcurrent is None:
            maxConcurrent = 1
        if autostart is None:
            autostart = True
        if hx_concurrent_thread_Threads.get_isSupported():
            return hx_concurrent_executor_ThreadPoolExecutor(maxConcurrent, autostart)
        return hx_concurrent_executor_TimerExecutor(autostart)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.completionListeners = None


hx_concurrent_executor_Executor._hx_class = hx_concurrent_executor_Executor
_hx_classes["hx.concurrent.executor.Executor"] = hx_concurrent_executor_Executor
class hx_concurrent_executor_TaskFuture:
    # Marker interface for futures returned by executors; extends Future.
    _hx_class_name = "hx.concurrent.executor.TaskFuture"
    _hx_is_interface = "True"
    __slots__ = ()
    _hx_interfaces = [hx_concurrent_Future]
# Register the class in the Haxe runtime metadata tables.
hx_concurrent_executor_TaskFuture._hx_class = hx_concurrent_executor_TaskFuture
_hx_classes["hx.concurrent.executor.TaskFuture"] = hx_concurrent_executor_TaskFuture
class hx_concurrent_executor_AbstractTaskFuture(hx_concurrent_CompletableFuture):
    # Future handed out for a submitted task: couples the task with its
    # validated schedule and a cooperative cancellation flag.
    _hx_class_name = "hx.concurrent.executor.AbstractTaskFuture"
    _hx_is_interface = "False"
    __slots__ = ("schedule", "isStopped", "_executor", "_task")
    _hx_fields = ["schedule", "isStopped", "_executor", "_task"]
    _hx_methods = ["cancel"]
    _hx_statics = []
    _hx_interfaces = [hx_concurrent_executor_TaskFuture]
    _hx_super = hx_concurrent_CompletableFuture
    def __init__(self,executor,task,schedule):
        self._task = None
        self._executor = None
        self.schedule = None
        self.isStopped = False
        super().__init__()
        self._executor = executor
        self._task = task
        # Normalize/validate the schedule up front (raises on bad values).
        self.schedule = hx_concurrent_executor_ScheduleTools.assertValid(schedule)
    def cancel(self):
        # Mark the task as stopped (cooperative cancellation flag).
        self.isStopped = True
    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.schedule = None
        _hx_o.isStopped = None
        _hx_o._executor = None
        _hx_o._task = None
# Register the class in the Haxe runtime metadata tables.
hx_concurrent_executor_AbstractTaskFuture._hx_class = hx_concurrent_executor_AbstractTaskFuture
_hx_classes["hx.concurrent.executor.AbstractTaskFuture"] = hx_concurrent_executor_AbstractTaskFuture
class hx_concurrent_executor_ScheduleTools:
    """Helpers for normalizing, validating and resolving Schedule values.

    Schedule variants are dispatched by enum index throughout this class:
      0 = ONCE(initialDelayMS)
      1 = FIXED_DELAY(intervalMS, initialDelayMS)
      2 = FIXED_RATE(intervalMS, initialDelayMS)
      3 = HOURLY(minute, second)
      4 = DAILY(hour, minute, second)
      5 = WEEKLY(day, hour, minute, second)
    (Mapping taken from the constructors invoked in applyDefaults.)
    """
    _hx_class_name = "hx.concurrent.executor.ScheduleTools"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["applyDefaults", "assertValid", "firstRunAt"]

    @staticmethod
    def applyDefaults(schedule):
        """Return `schedule` with any None time component replaced by 0.

        A new Schedule instance is only constructed when at least one
        component is None; otherwise the original value is returned.
        """
        tmp = schedule.index
        if (tmp == 0):
            initialDelayMS = schedule.params[0]
            if (initialDelayMS is None):
                return hx_concurrent_executor_Schedule.ONCE(0)
        elif (tmp == 1):
            intervalMS = schedule.params[0]
            initialDelayMS = schedule.params[1]
            if (initialDelayMS is None):
                return hx_concurrent_executor_Schedule.FIXED_DELAY(intervalMS,0)
        elif (tmp == 2):
            intervalMS = schedule.params[0]
            initialDelayMS = schedule.params[1]
            if (initialDelayMS is None):
                return hx_concurrent_executor_Schedule.FIXED_RATE(intervalMS,0)
        elif (tmp == 3):
            minute = schedule.params[0]
            second = schedule.params[1]
            if ((minute is None) or ((second is None))):
                return hx_concurrent_executor_Schedule.HOURLY((0 if ((minute is None)) else minute),(0 if ((second is None)) else second))
        elif (tmp == 4):
            hour = schedule.params[0]
            minute = schedule.params[1]
            second = schedule.params[2]
            if (((hour is None) or ((minute is None))) or ((second is None))):
                return hx_concurrent_executor_Schedule.DAILY((0 if ((hour is None)) else hour),(0 if ((minute is None)) else minute),(0 if ((second is None)) else second))
        elif (tmp == 5):
            day = schedule.params[0]
            hour = schedule.params[1]
            minute = schedule.params[2]
            second = schedule.params[3]
            if ((((day is None) or ((hour is None))) or ((minute is None))) or ((second is None))):
                return hx_concurrent_executor_Schedule.WEEKLY((0 if ((day is None)) else day),(0 if ((hour is None)) else hour),(0 if ((minute is None)) else minute),(0 if ((second is None)) else second))
        else:
            pass
        return schedule

    @staticmethod
    def assertValid(schedule):
        """Normalize `schedule` via applyDefaults() and validate its components.

        Raises a Haxe exception for non-positive intervals or negative time
        components; returns the normalized schedule otherwise.
        NOTE(review): several messages promise an upper bound (<= 23 / <= 59)
        but only the lower bound is actually enforced here.
        """
        schedule = hx_concurrent_executor_ScheduleTools.applyDefaults(schedule)
        tmp = schedule.index
        if (tmp == 0):
            initialDelayMS = schedule.params[0]
            if (initialDelayMS < 0):
                raise haxe_Exception.thrown("[Schedule.ONCE.initialDelayMS] must be >= 0")
        elif (tmp == 1):
            intervalMS = schedule.params[0]
            initialDelayMS = schedule.params[1]
            if (intervalMS <= 0):
                raise haxe_Exception.thrown("[Schedule.FIXED_DELAY.intervalMS] must be > 0")
            if ((initialDelayMS is None) or ((initialDelayMS < 0))):
                raise haxe_Exception.thrown("[Schedule.FIXED_DELAY.initialDelayMS] must be >= 0")
        elif (tmp == 2):
            intervalMS = schedule.params[0]
            initialDelayMS = schedule.params[1]
            if (intervalMS <= 0):
                raise haxe_Exception.thrown("[Schedule.FIXED_RATE.intervalMS] must be > 0")
            if (initialDelayMS < 0):
                raise haxe_Exception.thrown("[Schedule.FIXED_RATE.initialDelayMS] must be >= 0")
        elif (tmp == 3):
            minute = schedule.params[0]
            second = schedule.params[1]
            # Fixed: these two messages previously blamed Schedule.DAILY even
            # though enum index 3 is Schedule.HOURLY (see applyDefaults).
            if ((minute is None) or ((minute < 0))):
                raise haxe_Exception.thrown("[Schedule.HOURLY.minute] must be between >= 0 and <= 59")
            if ((second is None) or ((second < 0))):
                raise haxe_Exception.thrown("[Schedule.HOURLY.second] must be between >= 0 and <= 59")
        elif (tmp == 4):
            hour = schedule.params[0]
            minute = schedule.params[1]
            second = schedule.params[2]
            if ((hour is None) or ((hour < 0))):
                raise haxe_Exception.thrown("[Schedule.DAILY.hour] must be between >= 0 and <= 23")
            if ((minute is None) or ((minute < 0))):
                raise haxe_Exception.thrown("[Schedule.DAILY.minute] must be between >= 0 and <= 59")
            if ((second is None) or ((second < 0))):
                raise haxe_Exception.thrown("[Schedule.DAILY.second] must be between >= 0 and <= 59")
        elif (tmp == 5):
            day = schedule.params[0]
            hour = schedule.params[1]
            minute = schedule.params[2]
            second = schedule.params[3]
            if ((hour is None) or ((hour < 0))):
                raise haxe_Exception.thrown("[Schedule.WEEKLY.hour] must be between >= 0 and <= 23")
            if ((minute is None) or ((minute < 0))):
                raise haxe_Exception.thrown("[Schedule.WEEKLY.minute] must be between >= 0 and <= 59")
            if ((second is None) or ((second < 0))):
                raise haxe_Exception.thrown("[Schedule.WEEKLY.second] must be between >= 0 and <= 59")
        else:
            pass
        return schedule

    @staticmethod
    def firstRunAt(schedule):
        """Return the epoch timestamp (ms, float) of the schedule's first run.

        Validates the schedule first. For the calendar-based variants the
        next matching wall-clock slot is computed from the local time.
        Implicitly returns None for an unrecognized enum index.
        """
        schedule = hx_concurrent_executor_ScheduleTools.assertValid(schedule)
        tmp = schedule.index
        if (tmp == 0):
            initialDelayMS = schedule.params[0]
            return ((python_lib_Time.time() * 1000) + initialDelayMS)
        elif (tmp == 1):
            intervalMS = schedule.params[0]
            initialDelayMS = schedule.params[1]
            return ((python_lib_Time.time() * 1000) + initialDelayMS)
        elif (tmp == 2):
            intervalMS = schedule.params[0]
            initialDelayMS = schedule.params[1]
            return ((python_lib_Time.time() * 1000) + initialDelayMS)
        elif (tmp == 3):
            # HOURLY: next moment this hour (or the following hour) whose
            # minute/second match.
            minute = schedule.params[0]
            second = schedule.params[1]
            nowMS = (python_lib_Time.time() * 1000)
            now = Date.fromTime(nowMS)
            runAtSecondOfHour = ((minute * 60) + second)
            elapsedSecondsThisHour = ((now.date.minute * 60) + now.date.second)
            return ((nowMS + ((((runAtSecondOfHour - elapsedSecondsThisHour)) * 1000))) + ((3600000 if ((elapsedSecondsThisHour > runAtSecondOfHour)) else 0)))
        elif (tmp == 4):
            # DAILY: next moment today (or tomorrow) matching hour:minute:second.
            hour = schedule.params[0]
            minute = schedule.params[1]
            second = schedule.params[2]
            nowMS = (python_lib_Time.time() * 1000)
            now = Date.fromTime(nowMS)
            runAtSecondOfDay = ((((hour * 60) * 60) + ((minute * 60))) + second)
            elapsedSecondsToday = ((((now.date.hour * 60) * 60) + ((now.date.minute * 60))) + now.date.second)
            return ((nowMS + ((((runAtSecondOfDay - elapsedSecondsToday)) * 1000))) + ((86400000 if ((elapsedSecondsToday > runAtSecondOfDay)) else 0)))
        elif (tmp == 5):
            # WEEKLY: like DAILY plus a whole-day offset to the requested weekday.
            day = schedule.params[0]
            hour = schedule.params[1]
            minute = schedule.params[2]
            second = schedule.params[3]
            nowMS = (python_lib_Time.time() * 1000)
            now = Date.fromTime(nowMS)
            runAtSecondOfDay = ((((hour * 60) * 60) + ((minute * 60))) + second)
            elapsedSecondsToday = ((((now.date.hour * 60) * 60) + ((now.date.minute * 60))) + now.date.second)
            dayIndex = day
            # isoweekday() mod 7 yields 0=Sunday .. 6=Saturday.
            if (dayIndex == (HxOverrides.mod(now.date.isoweekday(), 7))):
                return ((nowMS + ((((runAtSecondOfDay - elapsedSecondsToday)) * 1000))) + ((604800000 if ((elapsedSecondsToday > runAtSecondOfDay)) else 0)))
            elif (now.date.day < dayIndex):
                # NOTE(review): `now.date.day` is the day of the MONTH while
                # dayIndex is a weekday index (0-6); this looks like a bug
                # inherited from the generated source — confirm upstream
                # before relying on WEEKLY schedules. Behavior kept as-is.
                return ((nowMS + ((((runAtSecondOfDay - elapsedSecondsToday)) * 1000))) + ((86400000 * ((dayIndex - now.date.day)))))
            else:
                return ((nowMS + ((((runAtSecondOfDay - elapsedSecondsToday)) * 1000))) + ((86400000 * ((7 - ((dayIndex - now.date.day)))))))
        else:
            pass
hx_concurrent_executor_ScheduleTools._hx_class = hx_concurrent_executor_ScheduleTools
_hx_classes["hx.concurrent.executor.ScheduleTools"] = hx_concurrent_executor_ScheduleTools
class hx_concurrent_executor_ThreadPoolExecutor(hx_concurrent_executor_Executor):
    """Executor backed by a ThreadPool plus a dedicated scheduler thread.

    The scheduler thread (spawned in onStart) polls _scheduledTasks,
    submits due tasks to the pool, prunes stopped ones, and drains newly
    submitted futures from the _newScheduledTasks queue.
    """
    _hx_class_name = "hx.concurrent.executor.ThreadPoolExecutor"
    _hx_is_interface = "False"
    __slots__ = ("_threadPool", "_scheduledTasks", "_newScheduledTasks")
    _hx_fields = ["_threadPool", "_scheduledTasks", "_newScheduledTasks"]
    _hx_methods = ["onStart", "submit"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = hx_concurrent_executor_Executor

    def __init__(self,threadPoolSize,autostart = None):
        if (autostart is None):
            autostart = True
        self._threadPool = None
        self._newScheduledTasks = hx_concurrent_collection_Queue()
        self._scheduledTasks = list()
        # Validate before constructing the pool.
        if (threadPoolSize < 1):
            raise haxe_Exception.thrown("[threadPoolSize] must be > 0")
        super().__init__()
        self._threadPool = hx_concurrent_thread_ThreadPool(threadPoolSize,autostart)
        if autostart:
            self.start()

    def onStart(self):
        """Transition to RUNNING and spawn the scheduler loop thread."""
        _gthis = self
        self.set_state(hx_concurrent_ServiceState.RUNNING)
        def _hx_local_7():
            # Scheduler loop: runs until the executor leaves RUNNING.
            doneTasks = list()
            while (_gthis.state == hx_concurrent_ServiceState.RUNNING):
                _g = 0
                _g1 = _gthis._scheduledTasks
                while (_g < len(_g1)):
                    # `t` is a 1-element list: Haxe boxes the loop variable so
                    # the nested closure below captures it by reference.
                    t = [(_g1[_g] if _g >= 0 and _g < len(_g1) else None)]
                    _g = (_g + 1)
                    if (t[0] if 0 < len(t) else None).isDue():
                        def _hx_local_2(t):
                            def _hx_local_1(ctx):
                                (t[0] if 0 < len(t) else None).run()
                            return _hx_local_1
                        _gthis._threadPool.submit(_hx_local_2(t))
                    elif (t[0] if 0 < len(t) else None).isStopped:
                        doneTasks.append((t[0] if 0 < len(t) else None))
                if (len(doneTasks) > 0):
                    # Remove finished tasks from the schedule, then clear
                    # doneTasks via an inlined Haxe Array.splice emulation.
                    _g2 = 0
                    while (_g2 < len(doneTasks)):
                        t1 = (doneTasks[_g2] if _g2 >= 0 and _g2 < len(doneTasks) else None)
                        _g2 = (_g2 + 1)
                        python_internal_ArrayImpl.remove(_gthis._scheduledTasks,t1)
                    l = len(doneTasks)
                    if (l < 0):
                        # Unreachable: len() is never negative; artifact of the
                        # generator inlining Haxe's array-resize logic.
                        idx = -1
                        v = None
                        l1 = len(doneTasks)
                        while (l1 < idx):
                            doneTasks.append(None)
                            l1 = (l1 + 1)
                        if (l1 == idx):
                            doneTasks.append(v)
                        else:
                            doneTasks[idx] = v
                    elif (l > 0):
                        # splice(0, l): drop all processed entries.
                        pos = 0
                        _hx_len = l
                        if (pos < 0):
                            pos = (len(doneTasks) + pos)
                        if (pos < 0):
                            pos = 0
                        res = doneTasks[pos:(pos + _hx_len)]
                        del doneTasks[pos:(pos + _hx_len)]
                # Drain newly submitted futures; idle-sleep when none arrived.
                t2 = _gthis._newScheduledTasks.pop()
                if (t2 is None):
                    Sys.sleep(0.01)
                    continue
                startAt = (python_lib_Time.time() * 1000)
                _this = _gthis._scheduledTasks
                _this.append(t2)
                # Keep draining for at most ~10ms so due tasks stay responsive.
                while (not ((((python_lib_Time.time() * 1000) - startAt) > 10))):
                    t3 = _gthis._newScheduledTasks.pop()
                    if (t3 is None):
                        break
                    _this1 = _gthis._scheduledTasks
                    _this1.append(t3)
            # Shutdown: cancel everything still scheduled or queued.
            _g = 0
            _g1 = _gthis._scheduledTasks
            while (_g < len(_g1)):
                t1 = (_g1[_g] if _g >= 0 and _g < len(_g1) else None)
                _g = (_g + 1)
                t1.cancel()
            while True:
                t1 = _gthis._newScheduledTasks.pop()
                if (t1 is None):
                    break
                t1.cancel()
            # Wait (unbounded: timeout -1) for the pool to finish, then stop.
            def _hx_local_6():
                return (_gthis._threadPool.state == hx_concurrent_ServiceState.STOPPED)
            hx_concurrent_thread_Threads._hx_await(_hx_local_6,-1)
            _gthis.set_state(hx_concurrent_ServiceState.STOPPED)
        hx_concurrent_thread_Threads.spawn(_hx_local_7)

    def submit(self,task,schedule = None):
        """Schedule `task` and return its TaskFuture.

        Defaults to Executor.NOW_ONCE when no schedule is given. ONCE tasks
        that are already due are pushed straight onto the thread pool;
        everything else is handed to the scheduler thread via the queue.
        Raises when the executor is not RUNNING. The whole operation runs
        under _stateLock to keep the state check and enqueue atomic.
        """
        _gthis = self
        schedule1 = (hx_concurrent_executor_Executor.NOW_ONCE if ((schedule is None)) else schedule)
        def _hx_local_2():
            def _hx_local_1():
                if (_gthis.state != hx_concurrent_ServiceState.RUNNING):
                    raise haxe_Exception.thrown((("Cannot accept new tasks. Executor is not in state [RUNNING] but [" + Std.string(_gthis.state)) + "]."))
                future = hx_concurrent_executor__ThreadPoolExecutor_TaskFutureImpl(_gthis,task,schedule1)
                if (schedule1.index == 0):
                    _g = schedule1.params[0]
                    if future.isDue():
                        # ONCE and already due: bypass the scheduler thread.
                        def _hx_local_0(ctx):
                            future.run()
                        _gthis._threadPool.submit(_hx_local_0)
                        return future
                _gthis._newScheduledTasks.push(future)
                return future
            return self._stateLock.execute(_hx_local_1)
        return _hx_local_2()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe reflection hook: reset fields of a bare instance.
        _hx_o._threadPool = None
        _hx_o._scheduledTasks = None
        _hx_o._newScheduledTasks = None
hx_concurrent_executor_ThreadPoolExecutor._hx_class = hx_concurrent_executor_ThreadPoolExecutor
_hx_classes["hx.concurrent.executor.ThreadPoolExecutor"] = hx_concurrent_executor_ThreadPoolExecutor
class hx_concurrent_executor__ThreadPoolExecutor_TaskFutureImpl(hx_concurrent_executor_AbstractTaskFuture):
    """TaskFuture used by ThreadPoolExecutor: tracks the next due time and
    executes the task inline on a pool thread."""
    _hx_class_name = "hx.concurrent.executor._ThreadPoolExecutor.TaskFutureImpl"
    _hx_is_interface = "False"
    __slots__ = ("_nextRunAt",)
    _hx_fields = ["_nextRunAt"]
    _hx_methods = ["isDue", "run"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = hx_concurrent_executor_AbstractTaskFuture

    def __init__(self,executor,task,schedule):
        self._nextRunAt = None
        super().__init__(executor,task,schedule)
        # Epoch ms of the first execution; -1 later means "never again".
        self._nextRunAt = hx_concurrent_executor_ScheduleTools.firstRunAt(self.schedule)

    def isDue(self):
        """Return True exactly once per due time; advances _nextRunAt as a
        side effect according to the schedule kind."""
        if (self.isStopped or ((self._nextRunAt == -1))):
            return False
        if ((python_lib_Time.time() * 1000) >= self._nextRunAt):
            _g = self.schedule
            tmp = _g.index
            # The repeated `_g1 = _g.params[...]` lines are discarded
            # enum-parameter bindings emitted by the Haxe pattern matcher.
            if (tmp == 0):
                _g1 = _g.params[0]
                # ONCE: never due again.
                self._nextRunAt = -1
            elif (tmp == 1):
                _g1 = _g.params[0]
                _g1 = _g.params[1]
                # FIXED_DELAY: next time is computed in run(), after completion.
                self._nextRunAt = -1
            elif (tmp == 2):
                _g1 = _g.params[1]
                intervalMS = _g.params[0]
                # FIXED_RATE: advance by the fixed interval.
                _hx_local_0 = self
                _hx_local_1 = _hx_local_0._nextRunAt
                _hx_local_0._nextRunAt = (_hx_local_1 + intervalMS)
                _hx_local_0._nextRunAt
            elif (tmp == 3):
                _g1 = _g.params[0]
                _g1 = _g.params[1]
                # HOURLY: advance by one hour in ms.
                _hx_local_2 = self
                _hx_local_3 = _hx_local_2._nextRunAt
                _hx_local_2._nextRunAt = (_hx_local_3 + 3600000)
                _hx_local_2._nextRunAt
            elif (tmp == 4):
                _g1 = _g.params[0]
                _g1 = _g.params[1]
                _g1 = _g.params[2]
                # DAILY: advance by 24h in ms.
                _hx_local_4 = self
                _hx_local_5 = _hx_local_4._nextRunAt
                _hx_local_4._nextRunAt = (_hx_local_5 + 86400000)
                _hx_local_4._nextRunAt
            elif (tmp == 5):
                _g1 = _g.params[0]
                _g1 = _g.params[1]
                _g1 = _g.params[2]
                _g1 = _g.params[3]
                # WEEKLY: advance by 7 days in ms.
                _hx_local_6 = self
                _hx_local_7 = _hx_local_6._nextRunAt
                _hx_local_6._nextRunAt = (_hx_local_7 + 604800000)
                _hx_local_6._nextRunAt
            else:
                pass
            return True
        return False

    def run(self):
        """Execute the task once, capture its result or exception, update
        the schedule state, and notify the executor's listeners."""
        if self.isStopped:
            return
        fnResult = None
        try:
            _g = self._task
            fnResult1 = _g.index
            if (fnResult1 == 0):
                # Either2.a: task is a function with a return value.
                functionWithReturnValue = _g.params[0]
                this1 = hx_concurrent_internal__Either2__Either2.a(functionWithReturnValue())
                fnResult = this1
            elif (fnResult1 == 1):
                # Either2.b: task is a void function.
                functionWithoutReturnValue = _g.params[0]
                functionWithoutReturnValue()
                fnResult = None
            else:
                pass
        except BaseException as _g:
            # Any raised exception becomes the failure outcome.
            ex = haxe_Exception.caught(_g)
            this1 = hx_concurrent_internal__Either2__Either2.b(hx_concurrent_ConcurrentException(ex))
            fnResult = this1
        _g = self.schedule
        tmp = _g.index
        if (tmp == 0):
            _g1 = _g.params[0]
            # ONCE: done for good.
            self.isStopped = True
        elif (tmp == 1):
            _g1 = _g.params[1]
            intervalMS = _g.params[0]
            # FIXED_DELAY: next run is relative to this completion time.
            self._nextRunAt = ((python_lib_Time.time() * 1000) + intervalMS)
        else:
            pass
        self.complete(fnResult,True)
        self._executor.notifyResult(self.result)

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe reflection hook: reset fields of a bare instance.
        _hx_o._nextRunAt = None
hx_concurrent_executor__ThreadPoolExecutor_TaskFutureImpl._hx_class = hx_concurrent_executor__ThreadPoolExecutor_TaskFutureImpl
_hx_classes["hx.concurrent.executor._ThreadPoolExecutor.TaskFutureImpl"] = hx_concurrent_executor__ThreadPoolExecutor_TaskFutureImpl
class hx_concurrent_executor_TimerExecutor(hx_concurrent_executor_Executor):
    """Fallback Executor that drives tasks via haxe_Timer instead of real
    threads; used when the runtime has no thread support."""
    _hx_class_name = "hx.concurrent.executor.TimerExecutor"
    _hx_is_interface = "False"
    __slots__ = ("_scheduledTasks",)
    _hx_fields = ["_scheduledTasks"]
    _hx_methods = ["submit"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = hx_concurrent_executor_Executor

    def __init__(self,autostart = None):
        if (autostart is None):
            autostart = True
        self._scheduledTasks = []
        super().__init__()
        if autostart:
            self.start()

    def submit(self,task,schedule = None):
        """Create a timer-driven TaskFuture for `task` and return it.

        Defaults to Executor.NOW_ONCE. First prunes stopped futures from
        _scheduledTasks (iterating backwards), then tracks the new future
        unless it is ONCE with a 0ms delay (fires immediately, nothing to
        keep). Raises when the executor is not RUNNING; runs under
        _stateLock.
        """
        _gthis = self
        schedule1 = (hx_concurrent_executor_Executor.NOW_ONCE if ((schedule is None)) else schedule)
        def _hx_local_1():
            def _hx_local_0():
                if (_gthis.state != hx_concurrent_ServiceState.RUNNING):
                    raise haxe_Exception.thrown((("Cannot accept new tasks. Executor is not in state [RUNNING] but [" + Std.string(_gthis.state)) + "]."))
                # Backwards sweep so in-place deletion keeps indices valid.
                i = len(_gthis._scheduledTasks)
                while True:
                    tmp = i
                    i = (i - 1)
                    if (not ((tmp > 0))):
                        break
                    if (_gthis._scheduledTasks[i] if i >= 0 and i < len(_gthis._scheduledTasks) else None).isStopped:
                        # Inlined Haxe Array.splice(i, 1).
                        _this = _gthis._scheduledTasks
                        pos = i
                        if (pos < 0):
                            pos = (len(_this) + pos)
                        if (pos < 0):
                            pos = 0
                        res = _this[pos:(pos + 1)]
                        del _this[pos:(pos + 1)]
                future = hx_concurrent_executor__TimerExecutor_TaskFutureImpl(_gthis,task,schedule1)
                # Track the future unless it is ONCE(0); the duplicated
                # append branches are the expansion of the Haxe switch.
                if (schedule1.index == 0):
                    _g = schedule1.params[0]
                    if (_g is None):
                        _this = _gthis._scheduledTasks
                        _this.append(future)
                    elif (_g != 0):
                        _this = _gthis._scheduledTasks
                        _this.append(future)
                else:
                    _this = _gthis._scheduledTasks
                    _this.append(future)
                return future
            return self._stateLock.execute(_hx_local_0)
        return _hx_local_1()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe reflection hook: reset fields of a bare instance.
        _hx_o._scheduledTasks = None
hx_concurrent_executor_TimerExecutor._hx_class = hx_concurrent_executor_TimerExecutor
_hx_classes["hx.concurrent.executor.TimerExecutor"] = hx_concurrent_executor_TimerExecutor
class hx_concurrent_executor__TimerExecutor_TaskFutureImpl(hx_concurrent_executor_AbstractTaskFuture):
    """TaskFuture used by TimerExecutor: drives the task with haxe_Timer
    callbacks instead of a scheduler thread."""
    _hx_class_name = "hx.concurrent.executor._TimerExecutor.TaskFutureImpl"
    _hx_is_interface = "False"
    __slots__ = ("_timer",)
    _hx_fields = ["_timer"]
    _hx_methods = ["run", "cancel"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = hx_concurrent_executor_AbstractTaskFuture

    def __init__(self,executor,task,schedule):
        self._timer = None
        super().__init__(executor,task,schedule)
        # Milliseconds until the first run, converted to int for Timer.delay.
        x = (hx_concurrent_executor_ScheduleTools.firstRunAt(self.schedule) - ((python_lib_Time.time() * 1000)))
        initialDelay = None
        try:
            initialDelay = int(x)
        except BaseException as _g:
            # Inlined Std.int(): falls back to None on conversion failure.
            None
            initialDelay = None
        if (initialDelay < 0):
            initialDelay = 0
        haxe_Timer.delay(self.run,initialDelay)

    def run(self):
        """Timer callback: install the repeating timer on first invocation,
        execute the task, then re-arm/terminate per the schedule kind."""
        if self.isStopped:
            return
        if (self._timer is None):
            # First run: for periodic schedules create a repeating timer that
            # calls run() again; ONCE/FIXED_DELAY leave `t` as None here.
            t = None
            _g = self.schedule
            tmp = _g.index
            # The `_g1 = _g.params[...]` lines are discarded enum-parameter
            # bindings emitted by the Haxe pattern matcher.
            if (tmp == 2):
                _g1 = _g.params[1]
                intervalMS = _g.params[0]
                # FIXED_RATE: repeat every intervalMS.
                t = haxe_Timer(intervalMS)
                t.run = self.run
            elif (tmp == 3):
                _g1 = _g.params[0]
                _g1 = _g.params[1]
                # HOURLY: repeat every hour.
                t = haxe_Timer(3600000)
                t.run = self.run
            elif (tmp == 4):
                _g1 = _g.params[0]
                _g1 = _g.params[1]
                _g1 = _g.params[2]
                # DAILY: repeat every 24h.
                t = haxe_Timer(86400000)
                t.run = self.run
            elif (tmp == 5):
                _g1 = _g.params[0]
                _g1 = _g.params[1]
                _g1 = _g.params[2]
                _g1 = _g.params[3]
                # WEEKLY: repeat every 7 days.
                t = haxe_Timer(604800000)
                t.run = self.run
            else:
                pass
            self._timer = t
        fnResult = None
        try:
            _g = self._task
            fnResult1 = _g.index
            if (fnResult1 == 0):
                # Either2.a: task is a function with a return value.
                functionWithReturnValue = _g.params[0]
                this1 = hx_concurrent_internal__Either2__Either2.a(functionWithReturnValue())
                fnResult = this1
            elif (fnResult1 == 1):
                # Either2.b: task is a void function.
                functionWithoutReturnValue = _g.params[0]
                functionWithoutReturnValue()
                fnResult = None
            else:
                pass
        except BaseException as _g:
            # Any raised exception becomes the failure outcome.
            ex = haxe_Exception.caught(_g)
            this1 = hx_concurrent_internal__Either2__Either2.b(hx_concurrent_ConcurrentException(ex))
            fnResult = this1
        _g = self.schedule
        tmp = _g.index
        if (tmp == 0):
            _g1 = _g.params[0]
            # ONCE: done for good.
            self.isStopped = True
        elif (tmp == 1):
            _g1 = _g.params[1]
            intervalMS = _g.params[0]
            # FIXED_DELAY: re-arm a one-shot timer relative to completion.
            self._timer = haxe_Timer.delay(self.run,intervalMS)
        else:
            pass
        self.complete(fnResult,True)
        self._executor.notifyResult(self.result)

    def cancel(self):
        # Stop the pending timer (if any) before flagging isStopped.
        t = self._timer
        if (t is not None):
            t.stop()
        super().cancel()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe reflection hook: reset fields of a bare instance.
        _hx_o._timer = None
hx_concurrent_executor__TimerExecutor_TaskFutureImpl._hx_class = hx_concurrent_executor__TimerExecutor_TaskFutureImpl
_hx_classes["hx.concurrent.executor._TimerExecutor.TaskFutureImpl"] = hx_concurrent_executor__TimerExecutor_TaskFutureImpl
class hx_concurrent_internal__Either2__Either2(Enum):
    """Haxe-generated tagged union with two variants, `a` and `b`.

    Instances are built via the generator's `Enum` base (presumably
    (tag, index, params) — defined earlier in this file); consumers
    dispatch on `.index` and read `.params`.
    """
    __slots__ = ()
    _hx_class_name = "hx.concurrent.internal._Either2._Either2"
    _hx_constructs = ["a", "b"]

    @staticmethod
    def a(v):
        # Variant constructor: wrap `v` as alternative `a` (index 0).
        return hx_concurrent_internal__Either2__Either2("a", 0, (v,))

    @staticmethod
    def b(v):
        # Variant constructor: wrap `v` as alternative `b` (index 1).
        return hx_concurrent_internal__Either2__Either2("b", 1, (v,))
hx_concurrent_internal__Either2__Either2._hx_class = hx_concurrent_internal__Either2__Either2
_hx_classes["hx.concurrent.internal._Either2._Either2"] = hx_concurrent_internal__Either2__Either2
class hx_concurrent_internal__Either3__Either3(Enum):
    """Haxe-generated tagged union with three variants `a`, `b`, `c`;
    see _Either2 for the construction/dispatch convention."""
    __slots__ = ()
    _hx_class_name = "hx.concurrent.internal._Either3._Either3"
    _hx_constructs = ["a", "b", "c"]

    @staticmethod
    def a(v):
        # Variant constructor: wrap `v` as alternative `a` (index 0).
        return hx_concurrent_internal__Either3__Either3("a", 0, (v,))

    @staticmethod
    def b(v):
        # Variant constructor: wrap `v` as alternative `b` (index 1).
        return hx_concurrent_internal__Either3__Either3("b", 1, (v,))

    @staticmethod
    def c(v):
        # Variant constructor: wrap `v` as alternative `c` (index 2).
        return hx_concurrent_internal__Either3__Either3("c", 2, (v,))
hx_concurrent_internal__Either3__Either3._hx_class = hx_concurrent_internal__Either3__Either3
_hx_classes["hx.concurrent.internal._Either3._Either3"] = hx_concurrent_internal__Either3__Either3
# Haxe interface marker for hx.concurrent.lock.Acquirable: carries only
# reflection metadata; concrete behavior lives in AbstractAcquirable and
# its subclasses.
class hx_concurrent_lock_Acquirable:
    _hx_class_name = "hx.concurrent.lock.Acquirable"
    _hx_is_interface = "True"  # stored as a string by the Haxe generator
    __slots__ = ()
hx_concurrent_lock_Acquirable._hx_class = hx_concurrent_lock_Acquirable
_hx_classes["hx.concurrent.lock.Acquirable"] = hx_concurrent_lock_Acquirable
class hx_concurrent_lock_AbstractAcquirable:
    """Skeleton implementation of the Acquirable contract.

    Subclasses provide acquire()/release(); this base contributes
    execute(), which runs a callback while the acquirable is held.
    """
    _hx_class_name = "hx.concurrent.lock.AbstractAcquirable"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_methods = ["release", "acquire", "execute"]
    _hx_interfaces = [hx_concurrent_lock_Acquirable]

    def execute(self,func,swallowExceptions = None):
        """Acquire, invoke `func`, then release.

        Returns func's result (None if it raised). An exception raised by
        func is re-thrown after release unless `swallowExceptions` is True
        (default False).
        """
        swallowExceptions = False if swallowExceptions is None else swallowExceptions
        caught = None
        outcome = None
        self.acquire()
        try:
            outcome = func()
        except BaseException as raw:
            # Capture now; the acquirable must be released before re-raising.
            caught = hx_concurrent_ConcurrentException(haxe_Exception.caught(raw))
        self.release()
        if caught is not None and not swallowExceptions:
            caught.rethrow()
        return outcome

    @staticmethod
    def _hx_empty_init(_hx_o): pass
hx_concurrent_lock_AbstractAcquirable._hx_class = hx_concurrent_lock_AbstractAcquirable
_hx_classes["hx.concurrent.lock.AbstractAcquirable"] = hx_concurrent_lock_AbstractAcquirable
class hx_concurrent_lock_RLock(hx_concurrent_lock_AbstractAcquirable):
    """Reentrant lock built on Python's threading.RLock, additionally
    tracking the holding thread and its reentrance count so release()
    can raise descriptive errors on misuse."""
    _hx_class_name = "hx.concurrent.lock.RLock"
    _hx_is_interface = "False"
    __slots__ = ("_rlock", "_holder", "_holderEntranceCount")
    _hx_fields = ["_rlock", "_holder", "_holderEntranceCount"]
    _hx_methods = ["acquire", "release"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = hx_concurrent_lock_AbstractAcquirable

    def __init__(self):
        self._holderEntranceCount = 0
        self._holder = None
        self._rlock = python_lib_threading_RLock()

    def acquire(self):
        """Block until the underlying RLock is held, then record the
        holding thread and bump the reentrance counter."""
        self._rlock.acquire()
        self._holder = hx_concurrent_thread_Threads.get_current()
        # Inlined Haxe `_holderEntranceCount++` (post-increment expansion).
        _hx_local_0 = self
        _hx_local_1 = _hx_local_0._holderEntranceCount
        _hx_local_0._holderEntranceCount = (_hx_local_1 + 1)
        _hx_local_1

    def release(self):
        """Release one level of the lock held by the current thread.

        Raises when the lock is held by another thread or not held at all.
        NOTE(review): "aquired" in the messages is a typo inherited from
        the generated source; kept as-is since these are runtime strings.
        """
        if HxOverrides.eq(self._holder,hx_concurrent_thread_Threads.get_current()):
            # Inlined Haxe `_holderEntranceCount--`.
            _hx_local_0 = self
            _hx_local_1 = _hx_local_0._holderEntranceCount
            _hx_local_0._holderEntranceCount = (_hx_local_1 - 1)
            _hx_local_1
            if (self._holderEntranceCount == 0):
                self._holder = None
        elif ((self._holder is not None) and (not HxOverrides.eq(self._holder,hx_concurrent_thread_Threads.get_current()))):
            raise haxe_Exception.thrown("Lock was aquired by another thread!")
        else:
            raise haxe_Exception.thrown("Lock was not aquired by any thread!")
        self._rlock.release()

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe reflection hook: reset fields of a bare instance.
        _hx_o._rlock = None
        _hx_o._holder = None
        _hx_o._holderEntranceCount = None
hx_concurrent_lock_RLock._hx_class = hx_concurrent_lock_RLock
_hx_classes["hx.concurrent.lock.RLock"] = hx_concurrent_lock_RLock
class hx_concurrent_thread_ThreadPool(hx_concurrent_ServiceBase):
    """Fixed-size worker thread pool.

    Workers spawned in onStart() poll _workQueue; submit() enqueues tasks
    (callables taking a ThreadContext). The pool transitions to STOPPED
    once the last worker exits after the state leaves RUNNING.
    """
    _hx_class_name = "hx.concurrent.thread.ThreadPool"
    _hx_is_interface = "False"
    __slots__ = ("_spawnedThreadCount", "_workingThreadCount", "_workQueue", "threadCount", "pollPeriod")
    _hx_fields = ["_spawnedThreadCount", "_workingThreadCount", "_workQueue", "threadCount", "pollPeriod"]
    _hx_methods = ["onStart", "submit"]
    _hx_statics = ["DEFAULT_POLL_PERIOD", "_threadIDs"]
    _hx_interfaces = []
    _hx_super = hx_concurrent_ServiceBase

    def __init__(self,numThreads,autostart = None):
        if (autostart is None):
            autostart = True
        self.threadCount = None
        self.pollPeriod = hx_concurrent_thread_ThreadPool.DEFAULT_POLL_PERIOD
        self._workQueue = hx_concurrent_collection_Queue()
        # Inlined AtomicInt construction for the working-thread counter.
        val = 0
        if (val is None):
            val = 0
        this1 = hx_concurrent_atomic__AtomicInt_AtomicIntImpl(val)
        self._workingThreadCount = this1
        # Inlined AtomicInt construction for the spawned-thread counter.
        val = 0
        if (val is None):
            val = 0
        this1 = hx_concurrent_atomic__AtomicInt_AtomicIntImpl(val)
        self._spawnedThreadCount = this1
        if (numThreads < 1):
            raise haxe_Exception.thrown("[numThreads] must be > 0")
        super().__init__()
        self.threadCount = numThreads
        if autostart:
            self.start()

    def onStart(self):
        """Transition to RUNNING and spawn `threadCount` worker threads."""
        _gthis = self
        self.set_state(hx_concurrent_ServiceState.RUNNING)
        _g = 0
        _g1 = self.threadCount
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            def _hx_local_1():
                # Worker main loop (runs on its own thread).
                _gthis._spawnedThreadCount.getAndIncrement()
                context = hx_concurrent_thread_ThreadContext(hx_concurrent_thread_ThreadPool._threadIDs.incrementAndGet())
                haxe_Log.trace((((((((("[" + Std.string(_gthis)) + "] Spawned thread ") + HxOverrides.stringOrNull((("null" if ((_gthis._spawnedThreadCount is None)) else Std.string(_gthis._spawnedThreadCount.get_value()))))) + "/") + Std.string(_gthis.threadCount)) + " with ID ") + Std.string(context.id)) + "."),_hx_AnonObject({'fileName': "hx/concurrent/thread/ThreadPool.hx", 'lineNumber': 107, 'className': "hx.concurrent.thread.ThreadPool", 'methodName': "onStart"}))
                while True:
                    task = _gthis._workQueue.pop()
                    if (task is None):
                        # Idle: exit when the pool is shutting down,
                        # otherwise nap for pollPeriod seconds.
                        if (_gthis.state != hx_concurrent_ServiceState.RUNNING):
                            break
                        Sys.sleep(_gthis.pollPeriod)
                    else:
                        try:
                            _gthis._workingThreadCount.getAndIncrement()
                            task(context)
                        except BaseException as _g:
                            # A failing task is logged but never kills the worker.
                            ex = haxe_Exception.caught(_g)
                            haxe_Log.trace(ex,_hx_AnonObject({'fileName': "hx/concurrent/thread/ThreadPool.hx", 'lineNumber': 120, 'className': "hx.concurrent.thread.ThreadPool", 'methodName': "onStart"}))
                        _gthis._workingThreadCount.getAndIncrement(-1)
                haxe_Log.trace((((("[" + Std.string(_gthis)) + "] Stopped thread with ID ") + Std.string(context.id)) + "."),_hx_AnonObject({'fileName': "hx/concurrent/thread/ThreadPool.hx", 'lineNumber': 126, 'className': "hx.concurrent.thread.ThreadPool", 'methodName': "onStart"}))
                _gthis._spawnedThreadCount.getAndIncrement(-1)
                # Last worker out flips the pool to STOPPED (under the lock).
                if (_gthis._spawnedThreadCount.get_value() == 0):
                    def _hx_local_0():
                        return _gthis.set_state(hx_concurrent_ServiceState.STOPPED)
                    _gthis._stateLock.execute(_hx_local_0)
            hx_concurrent_thread_Threads.spawn(_hx_local_1)

    def submit(self,task):
        """Enqueue `task` for execution; raises if task is None or the
        pool is not RUNNING. State check + enqueue run under _stateLock."""
        _gthis = self
        if (task is None):
            raise haxe_Exception.thrown("[task] must not be null")
        def _hx_local_0():
            if (_gthis.state != hx_concurrent_ServiceState.RUNNING):
                # NOTE(review): "requried" is a typo inherited from the
                # generated source; kept as-is (runtime string).
                raise haxe_Exception.thrown((("ThreadPool is not in requried state [RUNNING] but [" + Std.string(_gthis.state)) + "]"))
            _gthis._workQueue.push(task)
        self._stateLock.execute(_hx_local_0)

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe reflection hook: reset fields of a bare instance.
        _hx_o._spawnedThreadCount = None
        _hx_o._workingThreadCount = None
        _hx_o._workQueue = None
        _hx_o.threadCount = None
        _hx_o.pollPeriod = None
hx_concurrent_thread_ThreadPool._hx_class = hx_concurrent_thread_ThreadPool
_hx_classes["hx.concurrent.thread.ThreadPool"] = hx_concurrent_thread_ThreadPool
class hx_concurrent_thread_ThreadContext:
    """Per-worker context handed to tasks submitted to a ThreadPool;
    currently just carries the pool-assigned thread id."""
    _hx_class_name = "hx.concurrent.thread.ThreadContext"
    _hx_is_interface = "False"
    __slots__ = ("id",)
    _hx_fields = ["id"]

    def __init__(self,id):
        # `id` mirrors the Haxe field name (shadows the builtin; kept for
        # reflection compatibility).
        self.id = id

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe reflection hook: reset fields of a bare instance.
        _hx_o.id = None
# Register the class in the Haxe runtime's reflection tables.
hx_concurrent_thread_ThreadContext._hx_class = hx_concurrent_thread_ThreadContext
_hx_classes["hx.concurrent.thread.ThreadContext"] = hx_concurrent_thread_ThreadContext
class hx_concurrent_thread_Threads:
    """Static thread utilities: current-thread lookup, thread-support
    probe, condition polling and daemon-thread spawning."""
    _hx_class_name = "hx.concurrent.thread.Threads"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["get_current", "get_isSupported", "await", "spawn"]
    # Backing fields for the Haxe properties `current` / `isSupported`.
    current = None
    isSupported = None

    @staticmethod
    def get_current():
        """Return the Haxe thread wrapper for the calling thread."""
        return sys_thread__Thread_HxThread.current()

    @staticmethod
    def get_isSupported():
        """True when the `threading` module is importable on this runtime."""
        try:
            from threading import Thread
            return True
        except BaseException:
            return False

    @staticmethod
    def _hx_await(condition,timeoutMS,waitLoopSleepMS = None):
        """Poll `condition` until it is truthy.

        timeoutMS: -1 waits forever, 0 performs a single immediate check,
        > 0 limits the total wait; sleeps `waitLoopSleepMS` ms (default 10)
        between polls. Returns whether the condition became true.
        """
        waitLoopSleepMS = 10 if waitLoopSleepMS is None else waitLoopSleepMS
        if timeoutMS < -1:
            raise haxe_Exception.thrown("[timeoutMS] must be >= -1")
        if timeoutMS == 0:
            return condition()
        napSecs = waitLoopSleepMS / 1000.0
        startedAt = python_lib_Time.time() * 1000
        while not condition():
            if timeoutMS > 0:
                if ((python_lib_Time.time() * 1000) - startedAt) >= timeoutMS:
                    return False
            Sys.sleep(napSecs)
        return True

    @staticmethod
    def spawn(func):
        """Run `func` on a fresh daemon thread (won't block interpreter exit)."""
        worker = python_lib_threading_Thread(**python__KwArgs_KwArgs_Impl_.fromT(_hx_AnonObject({'target': func})))
        worker.daemon = True
        worker.start()
# Register the class in the Haxe runtime's reflection tables.
hx_concurrent_thread_Threads._hx_class = hx_concurrent_thread_Threads
_hx_classes["hx.concurrent.thread.Threads"] = hx_concurrent_thread_Threads
class pako_Inflate:
    """Streaming zlib/gzip decompressor: Haxe port of pako's Inflate wrapper.

    Feed compressed bytes via push(); decoded chunks are delivered to the
    onData callback and the final status to onEnd. The default callbacks
    collect chunks and expose the concatenation as `result`.
    """
    _hx_class_name = "pako.Inflate"
    _hx_is_interface = "False"
    __slots__ = ("options", "err", "msg", "ended", "chunks", "strm", "header", "result", "onData", "onEnd")
    _hx_fields = ["options", "err", "msg", "ended", "chunks", "strm", "header", "result", "onData", "onEnd"]
    _hx_methods = ["push", "_onData", "_onEnd"]
    _hx_statics = ["DEFAULT_OPTIONS"]

    def __init__(self,options = None):
        # Field pre-initialization in Haxe declaration order.
        self.onEnd = None
        self.onData = None
        self.result = None
        self.header = pako_zlib_GZHeader()
        self.strm = pako_zlib_ZStream()
        self.chunks = []
        self.ended = False
        self.msg = ""
        self.err = 0
        self.options = None
        self.options = _hx_AnonObject({})
        # Merge caller options over DEFAULT_OPTIONS field by field.
        Reflect.setField(self.options,"chunkSize",(Reflect.field(options,"chunkSize") if (((options is not None) and ((Reflect.field(options,"chunkSize") is not None)))) else Reflect.field(pako_Inflate.DEFAULT_OPTIONS,"chunkSize")))
        Reflect.setField(self.options,"windowBits",(Reflect.field(options,"windowBits") if (((options is not None) and ((Reflect.field(options,"windowBits") is not None)))) else Reflect.field(pako_Inflate.DEFAULT_OPTIONS,"windowBits")))
        Reflect.setField(self.options,"raw",(Reflect.field(options,"raw") if (((options is not None) and ((Reflect.field(options,"raw") is not None)))) else Reflect.field(pako_Inflate.DEFAULT_OPTIONS,"raw")))
        Reflect.setField(self.options,"dictionary",(Reflect.field(options,"dictionary") if (((options is not None) and ((Reflect.field(options,"dictionary") is not None)))) else Reflect.field(pako_Inflate.DEFAULT_OPTIONS,"dictionary")))
        # windowBits adjustments mirroring pako: raw deflate is selected by a
        # negative value.
        if ((Reflect.field(self.options,"raw") and ((Reflect.field(self.options,"windowBits") >= 0))) and ((Reflect.field(self.options,"windowBits") < 16))):
            Reflect.setField(self.options,"windowBits",-Reflect.field(self.options,"windowBits"))
            if (Reflect.field(self.options,"windowBits") == 0):
                Reflect.setField(self.options,"windowBits",-15)
        # When the caller did not set windowBits explicitly, add 32 to enable
        # automatic zlib/gzip header detection.
        if (((Reflect.field(self.options,"windowBits") >= 0) and ((Reflect.field(self.options,"windowBits") < 16))) and (((options is None) or ((Reflect.field(options,"windowBits") is None))))):
            _hx_local_0 = self.options
            Reflect.setField(_hx_local_0,"windowBits",(Reflect.field(_hx_local_0,"windowBits") + 32))
        # gzip range (15..47): ensure the low 4 bits carry a window size.
        if ((Reflect.field(self.options,"windowBits") > 15) and ((Reflect.field(self.options,"windowBits") < 48))):
            if (((Reflect.field(self.options,"windowBits") & 15)) == 0):
                _hx_local_1 = self.options
                Reflect.setField(_hx_local_1,"windowBits",(Reflect.field(_hx_local_1,"windowBits") | 15))
        self.onData = self._onData
        self.onEnd = self._onEnd
        self.strm.avail_out = 0
        status = pako_zlib_Inflate.inflateInit2(self.strm,Reflect.field(self.options,"windowBits"))
        if (status != 0):
            raise haxe_Exception.thrown(pako_zlib_Messages.get(status))
        pako_zlib_Inflate.inflateGetHeader(self.strm,self.header)

    def push(self,data,mode = None):
        """Feed a chunk of compressed `data` into the stream.

        `mode` False/0 = more data follows, True/4 = finish (Z_FINISH),
        2 = sync flush. Returns True while decoding can continue, False
        once an error occurred or the stream already ended with an error.
        """
        if (mode is None):
            mode = False
        strm = self.strm
        chunkSize = Reflect.field(self.options,"chunkSize")
        dictionary = Reflect.field(self.options,"dictionary")
        status = None
        _mode = None
        next_out_utf8 = None
        tail = None
        utf8str = None
        allowBufError = False
        if self.ended:
            return False
        # Normalize mode to a zlib flush constant (4 = Z_FINISH, 0 = Z_NO_FLUSH).
        if Std.isOfType(mode,Int):
            _mode = mode
        elif Std.isOfType(mode,Bool):
            _mode = (4 if mode else 0)
        else:
            raise haxe_Exception.thrown("Invalid mode.")
        strm.input = data
        strm.next_in = 0
        strm.avail_in = strm.input.byteLength
        while True:
            if (strm.avail_out == 0):
                # (Re)allocate a fresh output chunk.
                this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(chunkSize),0,chunkSize)
                this2 = this1
                strm.output = this2
                strm.next_out = 0
                strm.avail_out = chunkSize
            status = pako_zlib_Inflate.inflate(strm,0)
            # 2 = Z_NEED_DICT: retry after supplying the preset dictionary.
            if ((status == 2) and ((dictionary is not None))):
                status = pako_zlib_Inflate.inflateSetDictionary(self.strm,dictionary)
            # -5 = Z_BUF_ERROR is tolerated once after both buffers drained.
            if ((status == -5) and allowBufError):
                status = 0
                allowBufError = False
            if ((status != 1) and ((status != 0))):
                # Hard error: report and mark the stream dead.
                self.onEnd(status)
                self.ended = True
                return False
            if (strm.next_out != 0):
                # Flush output when it is full, the stream ended (1), or the
                # input is exhausted in finish/sync-flush mode.
                if (((strm.avail_out == 0) or ((status == 1))) or (((strm.avail_in == 0) and (((_mode == 4) or ((_mode == 2))))))):
                    tmp = self.onData
                    buf = strm.output
                    size = strm.next_out
                    if (buf.byteLength != size):
                        # Trim the chunk to the bytes actually produced.
                        buf = haxe_io__UInt8Array_UInt8Array_Impl_.fromData(buf.subarray(0,size))
                    tmp(buf)
            if ((strm.avail_in == 0) and ((strm.avail_out == 0))):
                allowBufError = True
            # Continue while input remains or output filled up, unless the
            # stream signalled completion (1 = Z_STREAM_END).
            if (not (((((strm.avail_in > 0) or ((strm.avail_out == 0)))) and ((status != 1))))):
                break
        if (status == 1):
            _mode = 4
        if (_mode == 4):
            # Finish: tear down the zlib state and report the final status.
            status = pako_zlib_Inflate.inflateEnd(self.strm)
            self.onEnd(status)
            self.ended = True
            return (status == 0)
        if (_mode == 2):
            # Sync flush: report success but keep the stream usable.
            self.onEnd(0)
            strm.avail_out = 0
            return True
        return True

    def _onData(self,chunk):
        # Default onData: accumulate decoded chunks.
        _this = self.chunks
        _this.append(chunk)

    def _onEnd(self,status):
        # Default onEnd: on success, flatten all chunks into `result`;
        # always record the final status and zlib message.
        if (status == 0):
            self.result = pako_utils_Common.flattenChunks(self.chunks)
        self.chunks = []
        self.err = status
        self.msg = self.strm.msg

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe reflection hook: reset fields of a bare instance.
        _hx_o.options = None
        _hx_o.err = None
        _hx_o.msg = None
        _hx_o.ended = None
        _hx_o.chunks = None
        _hx_o.strm = None
        _hx_o.header = None
        _hx_o.result = None
        _hx_o.onData = None
        _hx_o.onEnd = None
pako_Inflate._hx_class = pako_Inflate
_hx_classes["pako.Inflate"] = pako_Inflate
class pako_utils_Common:
    """Shared helpers for the pako port (Haxe-generated static class)."""
    _hx_class_name = "pako.utils.Common"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["flattenChunks"]

    @staticmethod
    def flattenChunks(chunks):
        """Concatenate a list of byte views into one newly-allocated view."""
        total = 0
        for chunk in chunks:
            total += chunk.byteLength
        result = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(total), 0, total)
        # Copy each chunk's backing bytes sequentially into the result.
        # NOTE(review): source offset 0 ignores chunk.byteOffset — matches the
        # original, which assumes each chunk is a full view over its buffer.
        offset = 0
        for chunk in chunks:
            result.bytes.blit(offset, chunk.bytes, 0, chunk.byteLength)
            offset += chunk.byteLength
        return result
# Haxe runtime metadata: self-reference plus dotted-name registry entry.
pako_utils_Common._hx_class = pako_utils_Common
_hx_classes["pako.utils.Common"] = pako_utils_Common
class pako_zlib_Adler32:
    """Adler-32 checksum (RFC 1950), ported from zlib's adler32.c."""
    _hx_class_name = "pako.zlib.Adler32"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["adler32"]

    @staticmethod
    def adler32(adler, buf, _hx_len, pos):
        """Update checksum `adler` over `_hx_len` bytes of `buf` starting at `pos`.

        `buf` is a haxe byte view (`.bytes.b` bytearray + `.byteOffset`).
        Returns the updated 32-bit checksum (s2 in the high half, s1 low).
        """
        # Split the running checksum into its two 16-bit halves.
        # ((adler & 0xFFFFFFFF) >> 16) is the unsigned 32-bit right shift
        # the Haxe runtime helper performed.
        s1 = adler & 65535
        s2 = ((adler & 0xFFFFFFFF) >> 16) & 65535
        data = buf.bytes.b
        base = buf.byteOffset
        while _hx_len != 0:
            # Process at most 2000 bytes between modulo reductions so the
            # sums cannot overflow 32 bits (5552 is zlib's bound; 2000 is
            # what this port uses).
            n = 2000 if _hx_len > 2000 else _hx_len
            _hx_len = _hx_len - n
            while True:
                s1 = s1 + data[pos + base]
                s2 = s2 + s1
                pos = pos + 1
                n = n - 1
                if n == 0:
                    break
            # s1/s2 stay non-negative here, so Python % matches Haxe mod.
            s1 = s1 % 65521
            s2 = s2 % 65521
        return s1 | (s2 << 16)
# Haxe runtime metadata: self-reference plus dotted-name registry entry.
pako_zlib_Adler32._hx_class = pako_zlib_Adler32
_hx_classes["pako.zlib.Adler32"] = pako_zlib_Adler32
class pako_zlib_CRC32:
    """CRC-32 (reflected polynomial 0xEDB88320), ported from zlib's crc32.c."""
    _hx_class_name = "pako.zlib.CRC32"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["makeTable", "crcTable", "crc32"]

    @staticmethod
    def makeTable():
        """Build the 256-entry CRC-32 lookup table.

        Entries are 32-bit values in the signed domain (negative when the
        high bit is set), matching the Haxe-generated arithmetic.
        -306674912 is the signed form of the reflected polynomial 0xEDB88320.
        """
        table = [None] * 256
        for n in range(256):
            c = n
            # Eight conditional reduction steps per byte value; the unsigned
            # 32-bit right shift is spelled ((c & 0xFFFFFFFF) >> 1).
            for _ in range(8):
                if c & 1:
                    c = -306674912 ^ ((c & 0xFFFFFFFF) >> 1)
                else:
                    c = (c & 0xFFFFFFFF) >> 1
            table[n] = c
        return table

    @staticmethod
    def crc32(crc, buf, _hx_len, pos):
        """Update `crc` over `_hx_len` bytes of `buf` starting at `pos`.

        `buf` is a haxe byte view (`.bytes.b` bytearray + `.byteOffset`).
        Requires pako_zlib_CRC32.crcTable to have been initialized.
        """
        table = pako_zlib_CRC32.crcTable
        data = buf.bytes.b
        base = buf.byteOffset
        crc = crc ^ -1                      # pre-invert
        for i in range(pos, pos + _hx_len):
            crc = ((crc & 0xFFFFFFFF) >> 8) ^ table[(crc ^ data[i + base]) & 255]
        return crc ^ -1                     # post-invert
# Haxe runtime metadata: self-reference plus dotted-name registry entry.
pako_zlib_CRC32._hx_class = pako_zlib_CRC32
_hx_classes["pako.zlib.CRC32"] = pako_zlib_CRC32
class pako_zlib_GZHeader:
    """Container for gzip member header fields (RFC 1952)."""
    _hx_class_name = "pako.zlib.GZHeader"
    _hx_is_interface = "False"
    __slots__ = ("text", "time", "xflags", "os", "extra", "extra_len", "name", "comment", "hcrc", "done")
    _hx_fields = ["text", "time", "xflags", "os", "extra", "extra_len", "name", "comment", "hcrc", "done"]

    def __init__(self):
        # All fields start zeroed/empty; header parsing fills them in.
        self.text = False      # FTEXT flag
        self.time = 0          # MTIME
        self.xflags = 0        # XFL byte
        self.os = 0            # OS byte
        self.extra = None      # FEXTRA payload
        self.extra_len = 0
        self.name = ""         # FNAME (original file name)
        self.comment = ""      # FCOMMENT
        self.hcrc = 0          # FHCRC flag
        self.done = False      # True once the header is fully parsed

    @staticmethod
    def _hx_empty_init(_hx_o):
        # Haxe-generated allocator hook: null out every declared field.
        for field in pako_zlib_GZHeader._hx_fields:
            setattr(_hx_o, field, None)
# Haxe runtime metadata: self-reference plus dotted-name registry entry.
pako_zlib_GZHeader._hx_class = pako_zlib_GZHeader
_hx_classes["pako.zlib.GZHeader"] = pako_zlib_GZHeader
class pako_zlib_InfFast:
    """Port of zlib's inffast.c: fast decoding of literal/length/distance
    codes while at least 6 input bytes and 258 output bytes remain."""
    _hx_class_name = "pako.zlib.InfFast"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["inflate_fast"]

    @staticmethod
    def inflate_fast(strm,start):
        # start: output index where inflate() first had output available;
        # used to compute `beg`, the lowest output index copies may read from.
        here = None
        op = None
        _hx_len = None
        dist = None
        _hx_from = None
        from_source = None
        state = strm.inflateState
        _in = strm.next_in
        input = strm.input
        # Stop the loop while >= 5 spare input bytes remain (refills read ahead).
        last = (_in + ((strm.avail_in - 5)))
        _out = strm.next_out
        output = strm.output
        beg = (_out - ((start - strm.avail_out)))
        # Stop while >= 257 spare output bytes remain (max match length 258).
        end = (_out + ((strm.avail_out - 257)))
        dmax = state.dmax
        wsize = state.wsize
        whave = state.whave
        wnext = state.wnext
        s_window = state.window
        hold = state.hold
        bits = state.bits
        lcode = state.lencode
        dcode = state.distcode
        lmask = (((1 << state.lenbits)) - 1)
        dmask = (((1 << state.distbits)) - 1)
        # exit_top emulates the labelled "break top" of the C/JS source.
        exit_top = False
        while (not exit_top):
            exit_top = False
            if (bits < 15):
                # Refill the bit accumulator with two input bytes.
                index = _in
                _in = (_in + 1)
                hold = (hold + ((input.bytes.b[(index + input.byteOffset)] << bits)))
                bits = (bits + 8)
                index1 = _in
                _in = (_in + 1)
                hold = (hold + ((input.bytes.b[(index1 + input.byteOffset)] << bits)))
                bits = (bits + 8)
            # here = lencode[hold & lmask]: 32-bit little-endian table entry,
            # sign-extended when the top bit is set.
            _this = lcode.bytes
            pos = (((((hold & lmask)) << 2)) + lcode.byteOffset)
            v = (((_this.b[pos] | ((_this.b[(pos + 1)] << 8))) | ((_this.b[(pos + 2)] << 16))) | ((_this.b[(pos + 3)] << 24)))
            here = ((v | -2147483648) if ((((v & -2147483648)) != 0)) else v)
            while True:
                # "dolen" loop: here>>24 = bits to consume, (here>>16)&255 = op.
                op = HxOverrides.rshift(here, 24)
                hold = HxOverrides.rshift(hold, op)
                bits = (bits - op)
                op = (HxOverrides.rshift(here, 16) & 255)
                if (op == 0):
                    # Literal byte: low 16 bits of the entry.
                    index2 = _out
                    _out = (_out + 1)
                    value = (here & 65535)
                    if ((index2 >= 0) and ((index2 < output.byteLength))):
                        output.bytes.b[(index2 + output.byteOffset)] = (value & 255)
                elif (((op & 16)) != 0):
                    # Length base value, plus op&15 extra bits.
                    _hx_len = (here & 65535)
                    op = (op & 15)
                    if (op != 0):
                        if (bits < op):
                            index3 = _in
                            _in = (_in + 1)
                            hold = (hold + ((input.bytes.b[(index3 + input.byteOffset)] << bits)))
                            bits = (bits + 8)
                        _hx_len = (_hx_len + ((hold & ((((1 << op)) - 1)))))
                        hold = HxOverrides.rshift(hold, op)
                        bits = (bits - op)
                    if (bits < 15):
                        index4 = _in
                        _in = (_in + 1)
                        hold = (hold + ((input.bytes.b[(index4 + input.byteOffset)] << bits)))
                        bits = (bits + 8)
                        index5 = _in
                        _in = (_in + 1)
                        hold = (hold + ((input.bytes.b[(index5 + input.byteOffset)] << bits)))
                        bits = (bits + 8)
                    # here = distcode[hold & dmask]
                    _this1 = dcode.bytes
                    pos1 = (((((hold & dmask)) << 2)) + dcode.byteOffset)
                    v1 = (((_this1.b[pos1] | ((_this1.b[(pos1 + 1)] << 8))) | ((_this1.b[(pos1 + 2)] << 16))) | ((_this1.b[(pos1 + 3)] << 24)))
                    here = ((v1 | -2147483648) if ((((v1 & -2147483648)) != 0)) else v1)
                    while True:
                        # "dodist" loop.
                        op = HxOverrides.rshift(here, 24)
                        hold = HxOverrides.rshift(hold, op)
                        bits = (bits - op)
                        op = (HxOverrides.rshift(here, 16) & 255)
                        if (((op & 16)) != 0):
                            # Distance base value, plus op&15 extra bits.
                            dist = (here & 65535)
                            op = (op & 15)
                            if (bits < op):
                                index6 = _in
                                _in = (_in + 1)
                                hold = (hold + ((input.bytes.b[(index6 + input.byteOffset)] << bits)))
                                bits = (bits + 8)
                                if (bits < op):
                                    index7 = _in
                                    _in = (_in + 1)
                                    hold = (hold + ((input.bytes.b[(index7 + input.byteOffset)] << bits)))
                                    bits = (bits + 8)
                            dist = (dist + ((hold & ((((1 << op)) - 1)))))
                            if (dist > dmax):
                                strm.msg = "invalid distance too far back"
                                state.mode = 30
                                exit_top = True
                                break
                            hold = HxOverrides.rshift(hold, op)
                            bits = (bits - op)
                            # op = bytes already produced in this output pass.
                            op = (_out - beg)
                            if (dist > op):
                                # Match reaches back into the sliding window.
                                op = (dist - op)
                                if (op > whave):
                                    if (state.sane != 0):
                                        strm.msg = "invalid distance too far back"
                                        state.mode = 30
                                        exit_top = True
                                        break
                                _hx_from = 0
                                from_source = s_window
                                if (wnext == 0):
                                    # Window not wrapped: copy from its tail.
                                    _hx_from = (_hx_from + ((wsize - op)))
                                    if (op < _hx_len):
                                        _hx_len = (_hx_len - op)
                                        while True:
                                            index8 = _out
                                            _out = (_out + 1)
                                            index9 = _hx_from
                                            _hx_from = (_hx_from + 1)
                                            value1 = s_window.bytes.b[(index9 + s_window.byteOffset)]
                                            if ((index8 >= 0) and ((index8 < output.byteLength))):
                                                output.bytes.b[(index8 + output.byteOffset)] = (value1 & 255)
                                            op = (op - 1)
                                            tmp = op
                                            if (not ((tmp != 0))):
                                                break
                                        # Rest of the match comes from output.
                                        _hx_from = (_out - dist)
                                        from_source = output
                                elif (wnext < op):
                                    # Wrap-around: start in the older half.
                                    _hx_from = (_hx_from + (((wsize + wnext) - op)))
                                    op = (op - wnext)
                                    if (op < _hx_len):
                                        _hx_len = (_hx_len - op)
                                        while True:
                                            index10 = _out
                                            _out = (_out + 1)
                                            index11 = _hx_from
                                            _hx_from = (_hx_from + 1)
                                            value2 = s_window.bytes.b[(index11 + s_window.byteOffset)]
                                            if ((index10 >= 0) and ((index10 < output.byteLength))):
                                                output.bytes.b[(index10 + output.byteOffset)] = (value2 & 255)
                                            op = (op - 1)
                                            tmp1 = op
                                            if (not ((tmp1 != 0))):
                                                break
                                        _hx_from = 0
                                        if (wnext < _hx_len):
                                            # Continue from the window start.
                                            op = wnext
                                            _hx_len = (_hx_len - op)
                                            while True:
                                                index12 = _out
                                                _out = (_out + 1)
                                                index13 = _hx_from
                                                _hx_from = (_hx_from + 1)
                                                value3 = s_window.bytes.b[(index13 + s_window.byteOffset)]
                                                if ((index12 >= 0) and ((index12 < output.byteLength))):
                                                    output.bytes.b[(index12 + output.byteOffset)] = (value3 & 255)
                                                op = (op - 1)
                                                tmp2 = op
                                                if (not ((tmp2 != 0))):
                                                    break
                                            _hx_from = (_out - dist)
                                            from_source = output
                                else:
                                    # Contiguous region inside the window.
                                    _hx_from = (_hx_from + ((wnext - op)))
                                    if (op < _hx_len):
                                        _hx_len = (_hx_len - op)
                                        while True:
                                            index14 = _out
                                            _out = (_out + 1)
                                            index15 = _hx_from
                                            _hx_from = (_hx_from + 1)
                                            value4 = s_window.bytes.b[(index15 + s_window.byteOffset)]
                                            if ((index14 >= 0) and ((index14 < output.byteLength))):
                                                output.bytes.b[(index14 + output.byteOffset)] = (value4 & 255)
                                            op = (op - 1)
                                            tmp3 = op
                                            if (not ((tmp3 != 0))):
                                                break
                                        _hx_from = (_out - dist)
                                        from_source = output
                                # Copy the remaining match three bytes at a time.
                                while (_hx_len > 2):
                                    index16 = _out
                                    _out = (_out + 1)
                                    index17 = _hx_from
                                    _hx_from = (_hx_from + 1)
                                    value5 = from_source.bytes.b[(index17 + from_source.byteOffset)]
                                    if ((index16 >= 0) and ((index16 < output.byteLength))):
                                        output.bytes.b[(index16 + output.byteOffset)] = (value5 & 255)
                                    index18 = _out
                                    _out = (_out + 1)
                                    index19 = _hx_from
                                    _hx_from = (_hx_from + 1)
                                    value6 = from_source.bytes.b[(index19 + from_source.byteOffset)]
                                    if ((index18 >= 0) and ((index18 < output.byteLength))):
                                        output.bytes.b[(index18 + output.byteOffset)] = (value6 & 255)
                                    index20 = _out
                                    _out = (_out + 1)
                                    index21 = _hx_from
                                    _hx_from = (_hx_from + 1)
                                    value7 = from_source.bytes.b[(index21 + from_source.byteOffset)]
                                    if ((index20 >= 0) and ((index20 < output.byteLength))):
                                        output.bytes.b[(index20 + output.byteOffset)] = (value7 & 255)
                                    _hx_len = (_hx_len - 3)
                                if (_hx_len != 0):
                                    index22 = _out
                                    _out = (_out + 1)
                                    index23 = _hx_from
                                    _hx_from = (_hx_from + 1)
                                    value8 = from_source.bytes.b[(index23 + from_source.byteOffset)]
                                    if ((index22 >= 0) and ((index22 < output.byteLength))):
                                        output.bytes.b[(index22 + output.byteOffset)] = (value8 & 255)
                                    if (_hx_len > 1):
                                        index24 = _out
                                        _out = (_out + 1)
                                        index25 = _hx_from
                                        _hx_from = (_hx_from + 1)
                                        value9 = from_source.bytes.b[(index25 + from_source.byteOffset)]
                                        if ((index24 >= 0) and ((index24 < output.byteLength))):
                                            output.bytes.b[(index24 + output.byteOffset)] = (value9 & 255)
                            else:
                                # Match lies entirely in current output.
                                _hx_from = (_out - dist)
                                while True:
                                    index26 = _out
                                    _out = (_out + 1)
                                    index27 = _hx_from
                                    _hx_from = (_hx_from + 1)
                                    value10 = output.bytes.b[(index27 + output.byteOffset)]
                                    if ((index26 >= 0) and ((index26 < output.byteLength))):
                                        output.bytes.b[(index26 + output.byteOffset)] = (value10 & 255)
                                    index28 = _out
                                    _out = (_out + 1)
                                    index29 = _hx_from
                                    _hx_from = (_hx_from + 1)
                                    value11 = output.bytes.b[(index29 + output.byteOffset)]
                                    if ((index28 >= 0) and ((index28 < output.byteLength))):
                                        output.bytes.b[(index28 + output.byteOffset)] = (value11 & 255)
                                    index30 = _out
                                    _out = (_out + 1)
                                    index31 = _hx_from
                                    _hx_from = (_hx_from + 1)
                                    value12 = output.bytes.b[(index31 + output.byteOffset)]
                                    if ((index30 >= 0) and ((index30 < output.byteLength))):
                                        output.bytes.b[(index30 + output.byteOffset)] = (value12 & 255)
                                    _hx_len = (_hx_len - 3)
                                    if (not ((_hx_len > 2))):
                                        break
                                if (_hx_len != 0):
                                    index32 = _out
                                    _out = (_out + 1)
                                    index33 = _hx_from
                                    _hx_from = (_hx_from + 1)
                                    value13 = output.bytes.b[(index33 + output.byteOffset)]
                                    if ((index32 >= 0) and ((index32 < output.byteLength))):
                                        output.bytes.b[(index32 + output.byteOffset)] = (value13 & 255)
                                    if (_hx_len > 1):
                                        index34 = _out
                                        _out = (_out + 1)
                                        index35 = _hx_from
                                        _hx_from = (_hx_from + 1)
                                        value14 = output.bytes.b[(index35 + output.byteOffset)]
                                        if ((index34 >= 0) and ((index34 < output.byteLength))):
                                            output.bytes.b[(index34 + output.byteOffset)] = (value14 & 255)
                        elif (((op & 64)) == 0):
                            # Second-level distance table lookup; retry dodist.
                            _this2 = dcode.bytes
                            pos2 = ((((((here & 65535)) + ((hold & ((((1 << op)) - 1))))) << 2)) + dcode.byteOffset)
                            v2 = (((_this2.b[pos2] | ((_this2.b[(pos2 + 1)] << 8))) | ((_this2.b[(pos2 + 2)] << 16))) | ((_this2.b[(pos2 + 3)] << 24)))
                            here = ((v2 | -2147483648) if ((((v2 & -2147483648)) != 0)) else v2)
                            continue
                        else:
                            strm.msg = "invalid distance code"
                            state.mode = 30
                            exit_top = True
                            break
                        break
                    # A dodist error must also abort the dolen loop.
                    if exit_top:
                        break
                elif (((op & 64)) == 0):
                    # Second-level length table lookup; retry dolen.
                    _this3 = lcode.bytes
                    pos3 = ((((((here & 65535)) + ((hold & ((((1 << op)) - 1))))) << 2)) + lcode.byteOffset)
                    v3 = (((_this3.b[pos3] | ((_this3.b[(pos3 + 1)] << 8))) | ((_this3.b[(pos3 + 2)] << 16))) | ((_this3.b[(pos3 + 3)] << 24)))
                    here = ((v3 | -2147483648) if ((((v3 & -2147483648)) != 0)) else v3)
                    continue
                elif (((op & 32)) != 0):
                    # End-of-block code: return to the TYPE state.
                    state.mode = 12
                    exit_top = True
                    break
                else:
                    strm.msg = "invalid literal/length code"
                    state.mode = 30
                    exit_top = True
                    break
                break
            if exit_top:
                if (not (((_in < last) and ((_out < end))))):
                    break
                else:
                    continue
            # do-while condition: keep going while input/output margins remain.
            if (not (((_in < last) and ((_out < end))))):
                break
        # Return unused whole bytes from the bit accumulator to the input.
        _hx_len = (bits >> 3)
        _in = (_in - _hx_len)
        bits = (bits - ((_hx_len << 3)))
        hold = (hold & ((((1 << bits)) - 1)))
        # Restore stream/state bookkeeping for the caller.
        strm.next_in = _in
        strm.next_out = _out
        strm.avail_in = ((5 + ((last - _in))) if ((_in < last)) else (5 - ((_in - last))))
        strm.avail_out = ((257 + ((end - _out))) if ((_out < end)) else (257 - ((_out - end))))
        state.hold = hold
        state.bits = bits
# Haxe runtime metadata: self-reference plus dotted-name registry entry.
pako_zlib_InfFast._hx_class = pako_zlib_InfFast
_hx_classes["pako.zlib.InfFast"] = pako_zlib_InfFast
class pako_zlib_InfTrees:
    """Port of zlib's inftrees.c: build canonical Huffman decoding tables.

    Tables are stored as uint32 entries in a haxe byte view:
    (bits << 24) | (op << 16) | val."""
    _hx_class_name = "pako.zlib.InfTrees"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["MAXBITS", "ENOUGH_LENS", "ENOUGH_DISTS", "CODES", "LENS", "DISTS", "lbase", "lext", "dbase", "dext", "inflate_table"]

    @staticmethod
    def inflate_table(_hx_type,lens,lens_index,codes,table,table_index,work,opts):
        # Returns 0 on success, -1 on an invalid/over-subscribed code set,
        # 1 when the table would exceed the ENOUGH_* size limits.
        # lens/work/count/offs are uint16 views; table is a uint32 view.
        bits = 0
        _hx_len = 0
        sym = 0
        _hx_min = 0
        _hx_max = 0
        root = 0
        curr = 0
        drop = 0
        left = 0
        used = 0
        huff = 0
        incr = 0
        fill = 0
        low = 0
        mask = 0
        next = 0
        base = None
        base_index = 0
        end = 0
        # count[len]: number of codes of each bit length (0..MAXBITS).
        size = (((pako_zlib_InfTrees.MAXBITS + 1)) * 2)
        this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(size),0,size)
        this2 = this1
        count = this2
        # offs[len]: starting index in `work` for codes of each length.
        size = (((pako_zlib_InfTrees.MAXBITS + 1)) * 2)
        this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(size),0,size)
        this2 = this1
        offs = this2
        extra = None
        extra_index = 0
        bits = opts.bits
        here_bits = None
        here_op = None
        here_val = None
        # Zero the length counts.
        _hx_len = 0
        while (_hx_len <= pako_zlib_InfTrees.MAXBITS):
            if ((_hx_len >= 0) and ((_hx_len < ((count.byteLength >> 1))))):
                _this = count.bytes
                pos = (((_hx_len << 1)) + count.byteOffset)
                _this.b[pos] = 0
                _this.b[(pos + 1)] = 0
            _hx_len = (_hx_len + 1)
        # Accumulate the count of codes per length: count[lens[sym]]++.
        sym = 0
        while (sym < codes):
            _this = lens.bytes
            pos = ((((lens_index + sym) << 1)) + lens.byteOffset)
            _g = (_this.b[pos] | ((_this.b[(pos + 1)] << 8)))
            _g1 = count
            _this1 = _g1.bytes
            pos1 = (((_g << 1)) + _g1.byteOffset)
            value = (((_this1.b[pos1] | ((_this1.b[(pos1 + 1)] << 8)))) + 1)
            if ((_g >= 0) and ((_g < ((_g1.byteLength >> 1))))):
                _this2 = _g1.bytes
                pos2 = (((_g << 1)) + _g1.byteOffset)
                _this2.b[pos2] = (value & 255)
                _this2.b[(pos2 + 1)] = ((value >> 8) & 255)
            sym = (sym + 1)
        # Bound the root table width by the longest used code length.
        root = bits
        _hx_max = pako_zlib_InfTrees.MAXBITS
        while (_hx_max >= 1):
            _this = count.bytes
            pos = (((_hx_max << 1)) + count.byteOffset)
            if (((_this.b[pos] | ((_this.b[(pos + 1)] << 8)))) != 0):
                break
            _hx_max = (_hx_max - 1)
        if (root > _hx_max):
            root = _hx_max
        if (_hx_max == 0):
            # No codes at all: emit two invalid-code entries (op=64, bits=1).
            index = table_index
            table_index = (table_index + 1)
            if ((index >= 0) and ((index < ((table.byteLength >> 2))))):
                _this = table.bytes
                pos = (((index << 2)) + table.byteOffset)
                _this.b[pos] = 0
                _this.b[(pos + 1)] = 0
                _this.b[(pos + 2)] = 64
                _this.b[(pos + 3)] = 1
            index = table_index
            table_index = (table_index + 1)
            if ((index >= 0) and ((index < ((table.byteLength >> 2))))):
                _this = table.bytes
                pos = (((index << 2)) + table.byteOffset)
                _this.b[pos] = 0
                _this.b[(pos + 1)] = 0
                _this.b[(pos + 2)] = 64
                _this.b[(pos + 3)] = 1
            opts.bits = 1
            return 0
        # Shortest used code length.
        _hx_min = 1
        while (_hx_min < _hx_max):
            _this = count.bytes
            pos = (((_hx_min << 1)) + count.byteOffset)
            if (((_this.b[pos] | ((_this.b[(pos + 1)] << 8)))) != 0):
                break
            _hx_min = (_hx_min + 1)
        if (root < _hx_min):
            root = _hx_min
        # Check for an over-subscribed or incomplete set of lengths.
        left = 1
        _hx_len = 1
        while (_hx_len <= pako_zlib_InfTrees.MAXBITS):
            left = (left << 1)
            _this = count.bytes
            pos = (((_hx_len << 1)) + count.byteOffset)
            left = (left - ((_this.b[pos] | ((_this.b[(pos + 1)] << 8)))))
            if (left < 0):
                return -1
            _hx_len = (_hx_len + 1)
        if ((left > 0) and (((_hx_type == pako_zlib_InfTrees.CODES) or ((_hx_max != 1))))):
            # Incomplete set allowed only for a single length/distance code.
            return -1
        # offs[1] = 0; offs[len+1] = offs[len] + count[len].
        if (1 < ((offs.byteLength >> 1))):
            _this = offs.bytes
            pos = (2 + offs.byteOffset)
            _this.b[pos] = 0
            _this.b[(pos + 1)] = 0
        _hx_len = 1
        while (_hx_len < pako_zlib_InfTrees.MAXBITS):
            index = (_hx_len + 1)
            _this = offs.bytes
            pos = (((_hx_len << 1)) + offs.byteOffset)
            value = (_this.b[pos] | ((_this.b[(pos + 1)] << 8)))
            _this1 = count.bytes
            pos1 = (((_hx_len << 1)) + count.byteOffset)
            value1 = (value + ((_this1.b[pos1] | ((_this1.b[(pos1 + 1)] << 8)))))
            if ((index >= 0) and ((index < ((offs.byteLength >> 1))))):
                _this2 = offs.bytes
                pos2 = (((index << 1)) + offs.byteOffset)
                _this2.b[pos2] = (value1 & 255)
                _this2.b[(pos2 + 1)] = ((value1 >> 8) & 255)
            _hx_len = (_hx_len + 1)
        # Sort symbols by code length into `work`: work[offs[lens[sym]]++] = sym.
        sym = 0
        while (sym < codes):
            _this = lens.bytes
            pos = ((((lens_index + sym) << 1)) + lens.byteOffset)
            if (((_this.b[pos] | ((_this.b[(pos + 1)] << 8)))) != 0):
                _this1 = lens.bytes
                pos1 = ((((lens_index + sym) << 1)) + lens.byteOffset)
                index = (_this1.b[pos1] | ((_this1.b[(pos1 + 1)] << 8)))
                _this2 = offs.bytes
                pos2 = (((index << 1)) + offs.byteOffset)
                index1 = (_this2.b[pos2] | ((_this2.b[(pos2 + 1)] << 8)))
                if ((index1 >= 0) and ((index1 < ((work.byteLength >> 1))))):
                    _this3 = work.bytes
                    pos3 = (((index1 << 1)) + work.byteOffset)
                    _this3.b[pos3] = (sym & 255)
                    _this3.b[(pos3 + 1)] = ((sym >> 8) & 255)
                _this4 = lens.bytes
                pos4 = ((((lens_index + sym) << 1)) + lens.byteOffset)
                _g = (_this4.b[pos4] | ((_this4.b[(pos4 + 1)] << 8)))
                _g1 = offs
                _this5 = _g1.bytes
                pos5 = (((_g << 1)) + _g1.byteOffset)
                value = (((_this5.b[pos5] | ((_this5.b[(pos5 + 1)] << 8)))) + 1)
                if ((_g >= 0) and ((_g < ((_g1.byteLength >> 1))))):
                    _this6 = _g1.bytes
                    pos6 = (((_g << 1)) + _g1.byteOffset)
                    _this6.b[pos6] = (value & 255)
                    _this6.b[(pos6 + 1)] = ((value >> 8) & 255)
            sym = (sym + 1)
        # Select base/extra tables and the end-of-symbols marker by table type.
        if (_hx_type == pako_zlib_InfTrees.CODES):
            extra = work
            base = extra
            end = 19
        elif (_hx_type == pako_zlib_InfTrees.LENS):
            base = pako_zlib_InfTrees.lbase
            base_index = (base_index - 257)
            extra = pako_zlib_InfTrees.lext
            extra_index = (extra_index - 257)
            end = 256
        else:
            base = pako_zlib_InfTrees.dbase
            extra = pako_zlib_InfTrees.dext
            end = -1
        # Fill the table entries, code by code.
        huff = 0
        sym = 0
        _hx_len = _hx_min
        next = table_index
        curr = root
        drop = 0
        low = -1
        used = (1 << root)
        mask = (used - 1)
        if (((_hx_type == pako_zlib_InfTrees.LENS) and ((used > pako_zlib_InfTrees.ENOUGH_LENS))) or (((_hx_type == pako_zlib_InfTrees.DISTS) and ((used > pako_zlib_InfTrees.ENOUGH_DISTS))))):
            return 1
        i = 0
        while True:
            i = (i + 1)
            # Build the table entry for work[sym].
            here_bits = (_hx_len - drop)
            _this = work.bytes
            pos = (((sym << 1)) + work.byteOffset)
            if (((_this.b[pos] | ((_this.b[(pos + 1)] << 8)))) < end):
                # Literal / direct value.
                here_op = 0
                _this1 = work.bytes
                pos1 = (((sym << 1)) + work.byteOffset)
                here_val = (_this1.b[pos1] | ((_this1.b[(pos1 + 1)] << 8)))
            else:
                _this2 = work.bytes
                pos2 = (((sym << 1)) + work.byteOffset)
                if (((_this2.b[pos2] | ((_this2.b[(pos2 + 1)] << 8)))) > end):
                    # Length/distance: op from extra[], base value from base[].
                    _this3 = work.bytes
                    pos3 = (((sym << 1)) + work.byteOffset)
                    index = (extra_index + ((_this3.b[pos3] | ((_this3.b[(pos3 + 1)] << 8)))))
                    _this4 = extra.bytes
                    pos4 = (((index << 1)) + extra.byteOffset)
                    here_op = (_this4.b[pos4] | ((_this4.b[(pos4 + 1)] << 8)))
                    _this5 = work.bytes
                    pos5 = (((sym << 1)) + work.byteOffset)
                    index1 = (base_index + ((_this5.b[pos5] | ((_this5.b[(pos5 + 1)] << 8)))))
                    _this6 = base.bytes
                    pos6 = (((index1 << 1)) + base.byteOffset)
                    here_val = (_this6.b[pos6] | ((_this6.b[(pos6 + 1)] << 8)))
                else:
                    # End-of-block marker.
                    here_op = 96
                    here_val = 0
            # Replicate the entry over all table slots sharing the code prefix.
            incr = (1 << ((_hx_len - drop)))
            fill = (1 << curr)
            _hx_min = fill
            while True:
                fill = (fill - incr)
                index2 = ((next + ((huff >> drop))) + fill)
                value = ((((here_bits << 24) | ((here_op << 16))) | here_val) | 0)
                if ((index2 >= 0) and ((index2 < ((table.byteLength >> 2))))):
                    _this7 = table.bytes
                    pos7 = (((index2 << 2)) + table.byteOffset)
                    _this7.b[pos7] = (value & 255)
                    _this7.b[(pos7 + 1)] = ((value >> 8) & 255)
                    _this7.b[(pos7 + 2)] = ((value >> 16) & 255)
                    _this7.b[(pos7 + 3)] = (HxOverrides.rshift(value, 24) & 255)
                if (not ((fill != 0))):
                    break
            # Advance `huff` to the next code (backwards-increment).
            incr = (1 << ((_hx_len - 1)))
            while (((huff & incr)) != 0):
                incr = (incr >> 1)
            if (incr != 0):
                huff = (huff & ((incr - 1)))
                huff = (huff + incr)
            else:
                huff = 0
            sym = (sym + 1)
            # count[len]-- ; when it hits zero move on to the next length.
            _g = _hx_len
            _g1 = count
            _this8 = _g1.bytes
            pos8 = (((_g << 1)) + _g1.byteOffset)
            value1 = (((_this8.b[pos8] | ((_this8.b[(pos8 + 1)] << 8)))) - 1)
            if ((_g >= 0) and ((_g < ((_g1.byteLength >> 1))))):
                _this9 = _g1.bytes
                pos9 = (((_g << 1)) + _g1.byteOffset)
                _this9.b[pos9] = (value1 & 255)
                _this9.b[(pos9 + 1)] = ((value1 >> 8) & 255)
            _this10 = count.bytes
            pos10 = (((_hx_len << 1)) + count.byteOffset)
            if (((_this10.b[pos10] | ((_this10.b[(pos10 + 1)] << 8)))) == 0):
                if (_hx_len == _hx_max):
                    break
                _this11 = work.bytes
                pos11 = (((sym << 1)) + work.byteOffset)
                index3 = (lens_index + ((_this11.b[pos11] | ((_this11.b[(pos11 + 1)] << 8)))))
                _this12 = lens.bytes
                pos12 = (((index3 << 1)) + lens.byteOffset)
                _hx_len = (_this12.b[pos12] | ((_this12.b[(pos12 + 1)] << 8)))
            # Create a new sub-table when the code is longer than the root
            # width and we moved past the previous sub-table's prefix.
            if ((_hx_len > root) and ((((huff & mask)) != low))):
                if (drop == 0):
                    drop = root
                next = (next + _hx_min)
                curr = (_hx_len - drop)
                left = (1 << curr)
                while ((curr + drop) < _hx_max):
                    _this13 = count.bytes
                    pos13 = ((((curr + drop) << 1)) + count.byteOffset)
                    left = (left - ((_this13.b[pos13] | ((_this13.b[(pos13 + 1)] << 8)))))
                    if (left <= 0):
                        break
                    curr = (curr + 1)
                    left = (left << 1)
                used = (used + ((1 << curr)))
                if (((_hx_type == pako_zlib_InfTrees.LENS) and ((used > pako_zlib_InfTrees.ENOUGH_LENS))) or (((_hx_type == pako_zlib_InfTrees.DISTS) and ((used > pako_zlib_InfTrees.ENOUGH_DISTS))))):
                    return 1
                # Link the root-table entry to the new sub-table.
                low = (huff & mask)
                value2 = ((((root << 24) | ((curr << 16))) | ((next - table_index))) | 0)
                if ((low >= 0) and ((low < ((table.byteLength >> 2))))):
                    _this14 = table.bytes
                    pos14 = (((low << 2)) + table.byteOffset)
                    _this14.b[pos14] = (value2 & 255)
                    _this14.b[(pos14 + 1)] = ((value2 >> 8) & 255)
                    _this14.b[(pos14 + 2)] = ((value2 >> 16) & 255)
                    _this14.b[(pos14 + 3)] = (HxOverrides.rshift(value2, 24) & 255)
        # Fill the remaining slot for an incomplete code with an invalid-code
        # entry (op = 64, i.e. 4194304 == 64 << 16).
        if (huff != 0):
            index = (next + huff)
            value = ((((_hx_len - drop) << 24) | 4194304) | 0)
            if ((index >= 0) and ((index < ((table.byteLength >> 2))))):
                _this = table.bytes
                pos = (((index << 2)) + table.byteOffset)
                _this.b[pos] = (value & 255)
                _this.b[(pos + 1)] = ((value >> 8) & 255)
                _this.b[(pos + 2)] = ((value >> 16) & 255)
                _this.b[(pos + 3)] = (HxOverrides.rshift(value, 24) & 255)
        opts.bits = root
        return 0
# Haxe runtime metadata: self-reference plus dotted-name registry entry.
pako_zlib_InfTrees._hx_class = pako_zlib_InfTrees
_hx_classes["pako.zlib.InfTrees"] = pako_zlib_InfTrees
# Port of zlib's inflate.c driver. Static-only class: all entry points are
# static methods and no instances are ever created.
class pako_zlib_Inflate:
    _hx_class_name = "pako.zlib.Inflate"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["inflateResetKeep", "inflateReset", "inflateReset2", "inflateInit2", "virgin", "lenfix", "distfix", "fixedtables", "updatewindow", "inflate", "inflateEnd", "inflateGetHeader", "inflateSetDictionary"]
@staticmethod
def inflateResetKeep(strm):
if ((strm is None) or ((strm.inflateState is None))):
return -2
state = strm.inflateState
def _hx_local_1():
def _hx_local_0():
state.total = 0
return state.total
strm.total_out = _hx_local_0()
return strm.total_out
strm.total_in = _hx_local_1()
strm.msg = ""
if (state.wrap != 0):
strm.adler = (state.wrap & 1)
state.mode = 1
state.last = False
state.havedict = False
state.dmax = 32768
state.head = None
state.hold = 0
state.bits = 0
size = 3408
this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(size),0,size)
this2 = this1
def _hx_local_2():
state.lendyn = this2
return state.lendyn
state.lencode = _hx_local_2()
size = 2368
this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(size),0,size)
this2 = this1
def _hx_local_3():
state.distdyn = this2
return state.distdyn
state.distcode = _hx_local_3()
state.sane = 1
state.back = -1
return 0
@staticmethod
def inflateReset(strm):
if ((strm is None) or ((strm.inflateState is None))):
return -2
state = strm.inflateState
state.wsize = 0
state.whave = 0
state.wnext = 0
return pako_zlib_Inflate.inflateResetKeep(strm)
@staticmethod
def inflateReset2(strm,windowBits):
wrap = None
if ((strm is None) or ((strm.inflateState is None))):
return -2
state = strm.inflateState
if (windowBits < 0):
wrap = 0
windowBits = -windowBits
else:
wrap = (((windowBits >> 4)) + 1)
if (windowBits < 48):
windowBits = (windowBits & 15)
if ((windowBits != 0) and (((windowBits < 8) or ((windowBits > 15))))):
return -2
if ((state.window is not None) and ((state.wbits != windowBits))):
state.window = None
state.wrap = wrap
state.wbits = windowBits
return pako_zlib_Inflate.inflateReset(strm)
@staticmethod
def inflateInit2(strm,windowBits):
if (strm is None):
return -2
state = pako_zlib_InflateState()
strm.inflateState = state
state.window = None
ret = pako_zlib_Inflate.inflateReset2(strm,windowBits)
if (ret != 0):
strm.inflateState = None
return ret
    @staticmethod
    def fixedtables(state):
        # Install the fixed-Huffman decoding tables (RFC 1951 section 3.2.6)
        # into `state`. The tables are built once and cached in the class
        # statics lenfix/distfix; `virgin` guards the one-time construction.
        if pako_zlib_Inflate.virgin:
            # 512 uint32 entries for literals/lengths, 32 for distances.
            size = 2048
            this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(size),0,size)
            this2 = this1
            pako_zlib_Inflate.lenfix = this2
            size = 128
            this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(size),0,size)
            this2 = this1
            pako_zlib_Inflate.distfix = this2
            # Fixed literal/length code lengths: 8,9,7,8 over the 288 symbols.
            sym = 0
            while (sym < 144):
                this1 = state.lens
                index = sym
                sym = (sym + 1)
                if ((index >= 0) and ((index < ((this1.byteLength >> 1))))):
                    _this = this1.bytes
                    pos = (((index << 1)) + this1.byteOffset)
                    _this.b[pos] = 8
                    _this.b[(pos + 1)] = 0
            while (sym < 256):
                this1 = state.lens
                index = sym
                sym = (sym + 1)
                if ((index >= 0) and ((index < ((this1.byteLength >> 1))))):
                    _this = this1.bytes
                    pos = (((index << 1)) + this1.byteOffset)
                    _this.b[pos] = 9
                    _this.b[(pos + 1)] = 0
            while (sym < 280):
                this1 = state.lens
                index = sym
                sym = (sym + 1)
                if ((index >= 0) and ((index < ((this1.byteLength >> 1))))):
                    _this = this1.bytes
                    pos = (((index << 1)) + this1.byteOffset)
                    _this.b[pos] = 7
                    _this.b[(pos + 1)] = 0
            while (sym < 288):
                this1 = state.lens
                index = sym
                sym = (sym + 1)
                if ((index >= 0) and ((index < ((this1.byteLength >> 1))))):
                    _this = this1.bytes
                    pos = (((index << 1)) + this1.byteOffset)
                    _this.b[pos] = 8
                    _this.b[(pos + 1)] = 0
            pako_zlib_InfTrees.inflate_table(1,state.lens,0,288,pako_zlib_Inflate.lenfix,0,state.work,_hx_AnonObject({'bits': 9}))
            # Fixed distance codes: all 32 symbols use length 5.
            sym = 0
            while (sym < 32):
                this1 = state.lens
                index = sym
                sym = (sym + 1)
                if ((index >= 0) and ((index < ((this1.byteLength >> 1))))):
                    _this = this1.bytes
                    pos = (((index << 1)) + this1.byteOffset)
                    _this.b[pos] = 5
                    _this.b[(pos + 1)] = 0
            pako_zlib_InfTrees.inflate_table(2,state.lens,0,32,pako_zlib_Inflate.distfix,0,state.work,_hx_AnonObject({'bits': 5}))
            pako_zlib_Inflate.virgin = False
        # Point the stream state at the (possibly cached) fixed tables.
        state.lencode = pako_zlib_Inflate.lenfix
        state.lenbits = 9
        state.distcode = pako_zlib_Inflate.distfix
        state.distbits = 5
    @staticmethod
    def updatewindow(strm,src,end,copy):
        # Copy the last `copy` bytes of produced output (ending at index `end`
        # in `src`) into the circular sliding window, allocating the window
        # lazily on first use. Always returns 0.
        dist = None
        state = strm.inflateState
        if (state.window is None):
            # Lazy allocation: window size is 2^wbits bytes.
            state.wsize = (1 << state.wbits)
            state.wnext = 0
            state.whave = 0
            elements = state.wsize
            this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(elements),0,elements)
            this2 = this1
            state.window = this2
        if (copy >= state.wsize):
            # More new output than the window holds: keep only the last wsize
            # bytes and reset the circular pointer.
            dest = state.window
            src1 = src
            dest.bytes.blit(dest.byteOffset,src1.bytes,(src1.byteOffset + ((end - state.wsize))),state.wsize)
            state.wnext = 0
            state.whave = state.wsize
        else:
            # First copy up to the end of the window...
            dist = (state.wsize - state.wnext)
            if (dist > copy):
                dist = copy
            dest = state.window
            src1 = src
            dest.bytes.blit((dest.byteOffset + state.wnext),src1.bytes,(src1.byteOffset + ((end - copy))),dist)
            copy = (copy - dist)
            if (copy != 0):
                # ...then wrap around and copy the remainder to the front.
                dest = state.window
                src1 = src
                dest.bytes.blit(dest.byteOffset,src1.bytes,(src1.byteOffset + ((end - copy))),copy)
                state.wnext = copy
                state.whave = state.wsize
            else:
                state.wnext = (state.wnext + dist)
                if (state.wnext == state.wsize):
                    state.wnext = 0
                if (state.whave < state.wsize):
                    state.whave = (state.whave + dist)
        return 0
@staticmethod
def inflate(strm = None,flush = None):
hold = 0
bits = 0
copy = 0
_hx_from = None
from_source = None
here = 0
here_bits = 0
here_op = 0
here_val = 0
last_bits = None
last_op = None
last_val = None
_hx_len = 0
this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(4),0,4)
this2 = this1
hbuf = this2
opts = None
n = None
order = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15]
if ((((strm is None) or ((strm.inflateState is None))) or ((strm.output is None))) or (((strm.input is None) and ((strm.avail_in != 0))))):
return -2
state = strm.inflateState
if (state.mode == 12):
state.mode = 13
put = strm.next_out
output = strm.output
left = strm.avail_out
next = strm.next_in
input = strm.input
have = strm.avail_in
hold = state.hold
bits = state.bits
_in = have
_out = left
ret = 0
inf_leave = False
while (not inf_leave):
inf_leave = False
_g = state.mode
if (_g == 1):
if (state.wrap == 0):
state.mode = 13
continue
while (bits < 16):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
if ((((state.wrap & 2)) != 0) and ((hold == 35615))):
state.check = 0
value = (hold & 255)
if (0 < hbuf.byteLength):
hbuf.bytes.b[hbuf.byteOffset] = (value & 255)
value1 = (HxOverrides.rshift(hold, 8) & 255)
if (1 < hbuf.byteLength):
hbuf.bytes.b[(1 + hbuf.byteOffset)] = (value1 & 255)
state.check = pako_zlib_CRC32.crc32(state.check,hbuf,2,0)
hold = 0
bits = 0
state.mode = 2
continue
state.flags = 0
if (state.head is not None):
state.head.done = False
if ((((state.wrap & 1)) != 1) or ((HxOverrides.mod(((((((hold & 255)) << 8)) + ((hold >> 8)))), 31) != 0))):
strm.msg = "incorrect header check"
state.mode = 30
continue
if (((hold & 15)) != 8):
strm.msg = "unknown compression method"
state.mode = 30
continue
hold = HxOverrides.rshift(hold, 4)
bits = (bits - 4)
_hx_len = (((hold & 15)) + 8)
if (state.wbits == 0):
state.wbits = _hx_len
elif (_hx_len > state.wbits):
strm.msg = "invalid window size"
state.mode = 30
continue
state.dmax = (1 << _hx_len)
def _hx_local_5():
state.check = 1
return state.check
strm.adler = _hx_local_5()
state.mode = (10 if ((((hold & 512)) != 0)) else 12)
hold = 0
bits = 0
elif (_g == 2):
while (bits < 16):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index1 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index1 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
state.flags = hold
if (((state.flags & 255)) != 8):
strm.msg = "unknown compression method"
state.mode = 30
continue
if (((state.flags & 57344)) != 0):
strm.msg = "unknown header flags set"
state.mode = 30
continue
if (state.head is not None):
state.head.text = ((((hold >> 8) & 1)) == 1)
if (((state.flags & 512)) != 0):
value2 = (hold & 255)
if (0 < hbuf.byteLength):
hbuf.bytes.b[hbuf.byteOffset] = (value2 & 255)
value3 = (HxOverrides.rshift(hold, 8) & 255)
if (1 < hbuf.byteLength):
hbuf.bytes.b[(1 + hbuf.byteOffset)] = (value3 & 255)
state.check = pako_zlib_CRC32.crc32(state.check,hbuf,2,0)
hold = 0
bits = 0
state.mode = 3
elif (_g == 3):
while (bits < 32):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index2 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index2 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
if (state.head is not None):
state.head.time = hold
if (((state.flags & 512)) != 0):
value4 = (hold & 255)
if (0 < hbuf.byteLength):
hbuf.bytes.b[hbuf.byteOffset] = (value4 & 255)
value5 = (HxOverrides.rshift(hold, 8) & 255)
if (1 < hbuf.byteLength):
hbuf.bytes.b[(1 + hbuf.byteOffset)] = (value5 & 255)
value6 = (HxOverrides.rshift(hold, 16) & 255)
if (2 < hbuf.byteLength):
hbuf.bytes.b[(2 + hbuf.byteOffset)] = (value6 & 255)
value7 = (HxOverrides.rshift(hold, 24) & 255)
if (3 < hbuf.byteLength):
hbuf.bytes.b[(3 + hbuf.byteOffset)] = (value7 & 255)
state.check = pako_zlib_CRC32.crc32(state.check,hbuf,4,0)
hold = 0
bits = 0
state.mode = 4
elif (_g == 4):
while (bits < 16):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index3 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index3 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
if (state.head is not None):
state.head.xflags = (hold & 255)
state.head.os = (hold >> 8)
if (((state.flags & 512)) != 0):
value8 = (hold & 255)
if (0 < hbuf.byteLength):
hbuf.bytes.b[hbuf.byteOffset] = (value8 & 255)
value9 = (HxOverrides.rshift(hold, 8) & 255)
if (1 < hbuf.byteLength):
hbuf.bytes.b[(1 + hbuf.byteOffset)] = (value9 & 255)
state.check = pako_zlib_CRC32.crc32(state.check,hbuf,2,0)
hold = 0
bits = 0
state.mode = 5
elif (_g == 5):
if (((state.flags & 1024)) != 0):
while (bits < 16):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index4 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index4 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
state.length = hold
if (state.head is not None):
state.head.extra_len = hold
if (((state.flags & 512)) != 0):
value10 = (hold & 255)
if (0 < hbuf.byteLength):
hbuf.bytes.b[hbuf.byteOffset] = (value10 & 255)
value11 = (HxOverrides.rshift(hold, 8) & 255)
if (1 < hbuf.byteLength):
hbuf.bytes.b[(1 + hbuf.byteOffset)] = (value11 & 255)
state.check = pako_zlib_CRC32.crc32(state.check,hbuf,2,0)
hold = 0
bits = 0
elif (state.head is not None):
state.head.extra = None
state.mode = 6
elif (_g == 6):
if (((state.flags & 1024)) != 0):
copy = state.length
if (copy > have):
copy = have
if (copy != 0):
if (state.head is not None):
_hx_len = (state.head.extra_len - state.length)
if (state.head.extra is None):
elements = state.head.extra_len
this1 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(elements),0,elements)
this2 = this1
state.head.extra = this2
dest = state.head.extra
src = input
dest.bytes.blit((dest.byteOffset + _hx_len),src.bytes,(src.byteOffset + next),copy)
if (((state.flags & 512)) != 0):
state.check = pako_zlib_CRC32.crc32(state.check,input,copy,next)
have = (have - copy)
next = (next + copy)
state.length = (state.length - copy)
if (state.length != 0):
inf_leave = True
break
state.length = 0
state.mode = 7
elif (_g == 7):
if (((state.flags & 2048)) != 0):
if (have == 0):
inf_leave = True
break
copy = 0
while True:
index5 = copy
copy = (copy + 1)
_hx_len = input.bytes.b[((next + index5) + input.byteOffset)]
if (((state.head is not None) and ((_hx_len != 0))) and ((state.length < 65536))):
_hx_local_21 = state.head
_hx_local_22 = _hx_local_21.name
_hx_local_21.name = (("null" if _hx_local_22 is None else _hx_local_22) + HxOverrides.stringOrNull("".join(map(chr,[_hx_len]))))
_hx_local_21.name
if (not (((_hx_len != 0) and ((copy < have))))):
break
if (((state.flags & 512)) != 0):
state.check = pako_zlib_CRC32.crc32(state.check,input,copy,next)
have = (have - copy)
next = (next + copy)
if (_hx_len != 0):
inf_leave = True
break
elif (state.head is not None):
state.head.name = None
state.length = 0
state.mode = 8
elif (_g == 8):
if (((state.flags & 4096)) != 0):
if (have == 0):
inf_leave = True
break
copy = 0
while True:
index6 = copy
copy = (copy + 1)
_hx_len = input.bytes.b[((next + index6) + input.byteOffset)]
if (((state.head is not None) and ((_hx_len != 0))) and ((state.length < 65536))):
_hx_local_25 = state.head
_hx_local_26 = _hx_local_25.comment
_hx_local_25.comment = (("null" if _hx_local_26 is None else _hx_local_26) + HxOverrides.stringOrNull("".join(map(chr,[_hx_len]))))
_hx_local_25.comment
if (not (((_hx_len != 0) and ((copy < have))))):
break
if (((state.flags & 512)) != 0):
state.check = pako_zlib_CRC32.crc32(state.check,input,copy,next)
have = (have - copy)
next = (next + copy)
if (_hx_len != 0):
inf_leave = True
break
elif (state.head is not None):
state.head.comment = None
state.mode = 9
elif (_g == 9):
if (((state.flags & 512)) != 0):
while (bits < 16):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index7 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index7 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
if (hold != ((state.check & 65535))):
strm.msg = "header crc mismatch"
state.mode = 30
continue
hold = 0
bits = 0
if (state.head is not None):
state.head.hcrc = ((state.flags >> 9) & 1)
state.head.done = True
def _hx_local_32():
state.check = 0
return state.check
strm.adler = _hx_local_32()
state.mode = 12
elif (_g == 10):
while (bits < 32):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index8 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index8 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
def _hx_local_36():
state.check = (((((HxOverrides.rshift(hold, 24) & 255)) + ((HxOverrides.rshift(hold, 8) & 65280))) + ((((hold & 65280)) << 8))) + ((((hold & 255)) << 24)))
return state.check
strm.adler = _hx_local_36()
hold = 0
bits = 0
state.mode = 11
elif (_g == 11):
if (not state.havedict):
strm.next_out = put
strm.avail_out = left
strm.next_in = next
strm.avail_in = have
state.hold = hold
state.bits = bits
return 2
def _hx_local_37():
state.check = 1
return state.check
strm.adler = _hx_local_37()
state.mode = 12
elif (_g == 12):
if ((flush == 5) or ((flush == 6))):
continue
state.mode = 13
elif (_g == 13):
if state.last:
hold = HxOverrides.rshift(hold, ((bits & 7)))
bits = (bits - ((bits & 7)))
state.mode = 27
continue
while (bits < 3):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index9 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index9 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
state.last = (((hold & 1)) == 1)
hold = HxOverrides.rshift(hold, 1)
bits = (bits - 1)
_g1 = (hold & 3)
if (_g1 == 0):
state.mode = 14
elif (_g1 == 1):
pako_zlib_Inflate.fixedtables(state)
state.mode = 20
if (flush == 6):
hold = HxOverrides.rshift(hold, 2)
bits = (bits - 2)
inf_leave = True
break
elif (_g1 == 2):
state.mode = 17
elif (_g1 == 3):
strm.msg = "invalid block type"
state.mode = 30
else:
pass
hold = HxOverrides.rshift(hold, 2)
bits = (bits - 2)
elif (_g == 14):
hold = HxOverrides.rshift(hold, ((bits & 7)))
bits = (bits - ((bits & 7)))
while (bits < 32):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index10 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index10 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
if (((hold & 65535)) != ((HxOverrides.rshift(hold, 16) ^ 65535))):
strm.msg = "invalid stored block lengths"
state.mode = 30
continue
state.length = (hold & 65535)
hold = 0
bits = 0
state.mode = 15
if (flush == 6):
inf_leave = True
break
elif (_g == 15):
state.mode = 16
elif (_g == 16):
copy = state.length
if (copy != 0):
if (copy > have):
copy = have
if (copy > left):
copy = left
if (copy == 0):
inf_leave = True
break
dest1 = output
src1 = input
dest1.bytes.blit((dest1.byteOffset + put),src1.bytes,(src1.byteOffset + next),copy)
have = (have - copy)
next = (next + copy)
left = (left - copy)
put = (put + copy)
state.length = (state.length - copy)
continue
state.mode = 12
elif (_g == 17):
while (bits < 14):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index11 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index11 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
state.nlen = (((hold & 31)) + 257)
hold = HxOverrides.rshift(hold, 5)
bits = (bits - 5)
state.ndist = (((hold & 31)) + 1)
hold = HxOverrides.rshift(hold, 5)
bits = (bits - 5)
state.ncode = (((hold & 15)) + 4)
hold = HxOverrides.rshift(hold, 4)
bits = (bits - 4)
if ((state.nlen > 286) or ((state.ndist > 30))):
strm.msg = "too many length or distance symbols"
state.mode = 30
continue
state.have = 0
state.mode = 18
elif (_g == 18):
while (state.have < state.ncode):
while (bits < 3):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index12 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index12 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
this3 = state.lens
def _hx_local_72():
_hx_local_71 = state.have
state.have = (state.have + 1)
return _hx_local_71
index13 = python_internal_ArrayImpl._get(order, _hx_local_72())
value12 = (hold & 7)
if ((index13 >= 0) and ((index13 < ((this3.byteLength >> 1))))):
_this = this3.bytes
pos = (((index13 << 1)) + this3.byteOffset)
_this.b[pos] = (value12 & 255)
_this.b[(pos + 1)] = ((value12 >> 8) & 255)
hold = HxOverrides.rshift(hold, 3)
bits = (bits - 3)
if inf_leave:
break
while (state.have < 19):
this4 = state.lens
def _hx_local_76():
_hx_local_75 = state.have
state.have = (state.have + 1)
return _hx_local_75
index14 = python_internal_ArrayImpl._get(order, _hx_local_76())
if ((index14 >= 0) and ((index14 < ((this4.byteLength >> 1))))):
_this1 = this4.bytes
pos1 = (((index14 << 1)) + this4.byteOffset)
_this1.b[pos1] = 0
_this1.b[(pos1 + 1)] = 0
state.lencode = state.lendyn
state.lenbits = 7
opts = _hx_AnonObject({'bits': state.lenbits})
ret = pako_zlib_InfTrees.inflate_table(0,state.lens,0,19,state.lencode,0,state.work,opts)
state.lenbits = opts.bits
if (ret != 0):
strm.msg = "invalid code lengths set"
state.mode = 30
continue
state.have = 0
state.mode = 19
elif (_g == 19):
while (state.have < ((state.nlen + state.ndist))):
while True:
this5 = state.lencode
_this2 = this5.bytes
pos2 = (((((hold & ((((1 << state.lenbits)) - 1)))) << 2)) + this5.byteOffset)
v = (((_this2.b[pos2] | ((_this2.b[(pos2 + 1)] << 8))) | ((_this2.b[(pos2 + 2)] << 16))) | ((_this2.b[(pos2 + 3)] << 24)))
here = ((v | -2147483648) if ((((v & -2147483648)) != 0)) else v)
here_bits = HxOverrides.rshift(here, 24)
here_op = (HxOverrides.rshift(here, 16) & 255)
here_val = (here & 65535)
if (here_bits <= bits):
break
if (have == 0):
inf_leave = True
break
have = (have - 1)
index15 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index15 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
if (here_val < 16):
hold = HxOverrides.rshift(hold, here_bits)
bits = (bits - here_bits)
this6 = state.lens
def _hx_local_83():
_hx_local_82 = state.have
state.have = (state.have + 1)
return _hx_local_82
index16 = _hx_local_83()
if ((index16 >= 0) and ((index16 < ((this6.byteLength >> 1))))):
_this3 = this6.bytes
pos3 = (((index16 << 1)) + this6.byteOffset)
_this3.b[pos3] = (here_val & 255)
_this3.b[(pos3 + 1)] = ((here_val >> 8) & 255)
else:
if (here_val == 16):
n = (here_bits + 2)
while (bits < n):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index17 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index17 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
hold = HxOverrides.rshift(hold, here_bits)
bits = (bits - here_bits)
if (state.have == 0):
strm.msg = "invalid bit length repeat"
state.mode = 30
break
this7 = state.lens
_this4 = this7.bytes
pos4 = ((((state.have - 1) << 1)) + this7.byteOffset)
_hx_len = (_this4.b[pos4] | ((_this4.b[(pos4 + 1)] << 8)))
copy = (3 + ((hold & 3)))
hold = HxOverrides.rshift(hold, 2)
bits = (bits - 2)
elif (here_val == 17):
n = (here_bits + 3)
while (bits < n):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index18 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index18 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
hold = HxOverrides.rshift(hold, here_bits)
bits = (bits - here_bits)
_hx_len = 0
copy = (3 + ((hold & 7)))
hold = HxOverrides.rshift(hold, 3)
bits = (bits - 3)
else:
n = (here_bits + 7)
while (bits < n):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index19 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index19 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
break
hold = HxOverrides.rshift(hold, here_bits)
bits = (bits - here_bits)
_hx_len = 0
copy = (11 + ((hold & 127)))
hold = HxOverrides.rshift(hold, 7)
bits = (bits - 7)
if ((state.have + copy) > ((state.nlen + state.ndist))):
strm.msg = "invalid bit length repeat"
state.mode = 30
break
while True:
tmp = copy
copy = (copy - 1)
if (not ((tmp != 0))):
break
this8 = state.lens
def _hx_local_106():
_hx_local_105 = state.have
state.have = (state.have + 1)
return _hx_local_105
index20 = _hx_local_106()
if ((index20 >= 0) and ((index20 < ((this8.byteLength >> 1))))):
_this5 = this8.bytes
pos5 = (((index20 << 1)) + this8.byteOffset)
_this5.b[pos5] = (_hx_len & 255)
_this5.b[(pos5 + 1)] = ((_hx_len >> 8) & 255)
if (inf_leave or ((state.mode == 30))):
continue
this9 = state.lens
_this6 = this9.bytes
pos6 = (512 + this9.byteOffset)
if (((_this6.b[pos6] | ((_this6.b[(pos6 + 1)] << 8)))) == 0):
strm.msg = "invalid code -- missing end-of-block"
state.mode = 30
continue
state.lenbits = 9
opts = _hx_AnonObject({'bits': state.lenbits})
ret = pako_zlib_InfTrees.inflate_table(1,state.lens,0,state.nlen,state.lencode,0,state.work,opts)
state.lenbits = opts.bits
if (ret != 0):
strm.msg = "invalid literal/lengths set"
state.mode = 30
continue
state.distbits = 6
state.distcode = state.distdyn
opts = _hx_AnonObject({'bits': state.distbits})
ret = pako_zlib_InfTrees.inflate_table(2,state.lens,state.nlen,state.ndist,state.distcode,0,state.work,opts)
state.distbits = opts.bits
if (ret != 0):
strm.msg = "invalid distances set"
state.mode = 30
continue
state.mode = 20
if (flush == 6):
inf_leave = True
continue
elif (_g == 20):
state.mode = 21
elif (_g == 21):
if ((have >= 6) and ((left >= 258))):
strm.next_out = put
strm.avail_out = left
strm.next_in = next
strm.avail_in = have
state.hold = hold
state.bits = bits
pako_zlib_InfFast.inflate_fast(strm,_out)
put = strm.next_out
output = strm.output
left = strm.avail_out
next = strm.next_in
input = strm.input
have = strm.avail_in
hold = state.hold
bits = state.bits
if (state.mode == 12):
state.back = -1
continue
state.back = 0
while True:
this10 = state.lencode
_this7 = this10.bytes
pos7 = (((((hold & ((((1 << state.lenbits)) - 1)))) << 2)) + this10.byteOffset)
v1 = (((_this7.b[pos7] | ((_this7.b[(pos7 + 1)] << 8))) | ((_this7.b[(pos7 + 2)] << 16))) | ((_this7.b[(pos7 + 3)] << 24)))
here = ((v1 | -2147483648) if ((((v1 & -2147483648)) != 0)) else v1)
here_bits = HxOverrides.rshift(here, 24)
here_op = (HxOverrides.rshift(here, 16) & 255)
here_val = (here & 65535)
if (here_bits <= bits):
break
if (have == 0):
inf_leave = True
break
have = (have - 1)
index21 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index21 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
continue
if ((here_op != 0) and ((((here_op & 240)) == 0))):
last_bits = here_bits
last_op = here_op
last_val = here_val
while True:
this11 = state.lencode
_this8 = this11.bytes
pos8 = ((((last_val + ((((hold & ((((1 << ((last_bits + last_op)))) - 1)))) >> last_bits))) << 2)) + this11.byteOffset)
v2 = (((_this8.b[pos8] | ((_this8.b[(pos8 + 1)] << 8))) | ((_this8.b[(pos8 + 2)] << 16))) | ((_this8.b[(pos8 + 3)] << 24)))
here = ((v2 | -2147483648) if ((((v2 & -2147483648)) != 0)) else v2)
here_bits = HxOverrides.rshift(here, 24)
here_op = (HxOverrides.rshift(here, 16) & 255)
here_val = (here & 65535)
if ((last_bits + here_bits) <= bits):
break
if (have == 0):
inf_leave = True
break
have = (have - 1)
index22 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index22 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
continue
hold = HxOverrides.rshift(hold, last_bits)
bits = (bits - last_bits)
state.back = (state.back + last_bits)
hold = HxOverrides.rshift(hold, here_bits)
bits = (bits - here_bits)
state.back = (state.back + here_bits)
state.length = here_val
if (here_op == 0):
state.mode = 26
continue
if (((here_op & 32)) != 0):
state.back = -1
state.mode = 12
continue
if (((here_op & 64)) != 0):
strm.msg = "invalid literal/length code"
state.mode = 30
continue
state.extra = (here_op & 15)
state.mode = 22
elif (_g == 22):
if (state.extra != 0):
n = state.extra
while (bits < n):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index23 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index23 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
continue
state.length = (state.length + ((hold & ((((1 << state.extra)) - 1)))))
hold = HxOverrides.rshift(hold, state.extra)
bits = (bits - state.extra)
state.back = (state.back + state.extra)
state.was = state.length
state.mode = 23
elif (_g == 23):
while True:
this12 = state.distcode
_this9 = this12.bytes
pos9 = (((((hold & ((((1 << state.distbits)) - 1)))) << 2)) + this12.byteOffset)
v3 = (((_this9.b[pos9] | ((_this9.b[(pos9 + 1)] << 8))) | ((_this9.b[(pos9 + 2)] << 16))) | ((_this9.b[(pos9 + 3)] << 24)))
here = ((v3 | -2147483648) if ((((v3 & -2147483648)) != 0)) else v3)
here_bits = HxOverrides.rshift(here, 24)
here_op = (HxOverrides.rshift(here, 16) & 255)
here_val = (here & 65535)
if (here_bits <= bits):
break
if (have == 0):
inf_leave = True
break
have = (have - 1)
index24 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index24 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
continue
if (((here_op & 240)) == 0):
last_bits = here_bits
last_op = here_op
last_val = here_val
while True:
this13 = state.distcode
_this10 = this13.bytes
pos10 = ((((last_val + ((((hold & ((((1 << ((last_bits + last_op)))) - 1)))) >> last_bits))) << 2)) + this13.byteOffset)
v4 = (((_this10.b[pos10] | ((_this10.b[(pos10 + 1)] << 8))) | ((_this10.b[(pos10 + 2)] << 16))) | ((_this10.b[(pos10 + 3)] << 24)))
here = ((v4 | -2147483648) if ((((v4 & -2147483648)) != 0)) else v4)
here_bits = HxOverrides.rshift(here, 24)
here_op = (HxOverrides.rshift(here, 16) & 255)
here_val = (here & 65535)
if ((last_bits + here_bits) <= bits):
break
if (have == 0):
inf_leave = True
break
have = (have - 1)
index25 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index25 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
continue
hold = HxOverrides.rshift(hold, last_bits)
bits = (bits - last_bits)
state.back = (state.back + last_bits)
hold = HxOverrides.rshift(hold, here_bits)
bits = (bits - here_bits)
state.back = (state.back + here_bits)
if (((here_op & 64)) != 0):
strm.msg = "invalid distance code"
state.mode = 30
continue
state.offset = here_val
state.extra = (here_op & 15)
state.mode = 24
elif (_g == 24):
if (state.extra != 0):
n = state.extra
while (bits < n):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index26 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index26 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
continue
state.offset = (state.offset + ((hold & ((((1 << state.extra)) - 1)))))
hold = HxOverrides.rshift(hold, state.extra)
bits = (bits - state.extra)
state.back = (state.back + state.extra)
if (state.offset > state.dmax):
strm.msg = "invalid distance too far back"
state.mode = 30
continue
state.mode = 25
elif (_g == 25):
if (left == 0):
inf_leave = True
continue
copy = (_out - left)
if (state.offset > copy):
copy = (state.offset - copy)
if (copy > state.whave):
if (state.sane != 0):
strm.msg = "invalid distance too far back"
state.mode = 30
continue
if (copy > state.wnext):
copy = (copy - state.wnext)
_hx_from = (state.wsize - copy)
else:
_hx_from = (state.wnext - copy)
if (copy > state.length):
copy = state.length
from_source = state.window
else:
from_source = output
_hx_from = (put - state.offset)
copy = state.length
if (copy > left):
copy = left
left = (left - copy)
state.length = (state.length - copy)
while True:
index27 = put
put = (put + 1)
index28 = _hx_from
_hx_from = (_hx_from + 1)
value13 = from_source.bytes.b[(index28 + from_source.byteOffset)]
if ((index27 >= 0) and ((index27 < output.byteLength))):
output.bytes.b[(index27 + output.byteOffset)] = (value13 & 255)
copy = (copy - 1)
tmp1 = copy
if (not ((tmp1 != 0))):
break
if (state.length == 0):
state.mode = 21
elif (_g == 26):
if (left == 0):
inf_leave = True
continue
index29 = put
put = (put + 1)
value14 = state.length
if ((index29 >= 0) and ((index29 < output.byteLength))):
output.bytes.b[(index29 + output.byteOffset)] = (value14 & 255)
left = (left - 1)
state.mode = 21
elif (_g == 27):
if (state.wrap != 0):
while (bits < 32):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index30 = next
next = (next + 1)
hold = (hold | ((input.bytes.b[(index30 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
continue
_out = (_out - left)
strm.total_out = (strm.total_out + _out)
state.total = (state.total + _out)
if (_out != 0):
def _hx_local_155():
state.check = (pako_zlib_CRC32.crc32(state.check,output,_out,(put - _out)) if ((state.flags != 0)) else pako_zlib_Adler32.adler32(state.check,output,_out,(put - _out)))
return state.check
strm.adler = _hx_local_155()
_out = left
hold = ((hold + (2 ** 31)) % (2 ** 32) - (2 ** 31))
if (((hold if ((state.flags != 0)) else (((((HxOverrides.rshift(hold, 24) & 255)) + ((HxOverrides.rshift(hold, 8) & 65280))) + ((((hold & 65280)) << 8))) + ((((hold & 255)) << 24))))) != state.check):
strm.msg = "incorrect data check"
state.mode = 30
continue
hold = 0
bits = 0
state.mode = 28
elif (_g == 28):
if ((state.wrap != 0) and ((state.flags != 0))):
while (bits < 32):
if (have == 0):
inf_leave = True
break
have = (have - 1)
index31 = next
next = (next + 1)
hold = (hold + ((input.bytes.b[(index31 + input.byteOffset)] << bits)))
bits = (bits + 8)
if inf_leave:
continue
if (hold != ((state.total & -1))):
strm.msg = "incorrect length check"
state.mode = 30
continue
hold = 0
bits = 0
state.mode = 29
elif (_g == 29):
ret = 1
inf_leave = True
continue
elif (_g == 30):
ret = -3
inf_leave = True
continue
elif (_g == 31):
return -4
elif (_g == 32):
return -2
else:
return -2
strm.next_out = put
strm.avail_out = left
strm.next_in = next
strm.avail_in = have
state.hold = hold
state.bits = bits
if ((state.wsize != 0) or ((((_out != strm.avail_out) and ((state.mode < 30))) and (((state.mode < 27) or ((flush != 4))))))):
if (pako_zlib_Inflate.updatewindow(strm,strm.output,strm.next_out,(_out - strm.avail_out)) != 0):
state.mode = 31
return -4
_in = (_in - strm.avail_in)
_out = (_out - strm.avail_out)
strm.total_in = (strm.total_in + _in)
strm.total_out = (strm.total_out + _out)
state.total = (state.total + _out)
if ((state.wrap != 0) and ((_out != 0))):
def _hx_local_164():
state.check = (pako_zlib_CRC32.crc32(state.check,output,_out,(strm.next_out - _out)) if ((state.flags != 0)) else pako_zlib_Adler32.adler32(state.check,output,_out,(strm.next_out - _out)))
return state.check
strm.adler = _hx_local_164()
strm.data_type = (((state.bits + ((64 if (state.last) else 0))) + ((128 if ((state.mode == 12)) else 0))) + ((256 if (((state.mode == 20) or ((state.mode == 15)))) else 0)))
if (((((_in == 0) and ((_out == 0))) or ((flush == 4)))) and ((ret == 0))):
ret = -5
return ret
@staticmethod
def inflateEnd(strm = None):
if ((strm is None) or ((strm.inflateState is None))):
return -2
state = strm.inflateState
if (state.window is not None):
state.window = None
strm.inflateState = None
return 0
@staticmethod
def inflateGetHeader(strm,head):
if ((strm is None) or ((strm.inflateState is None))):
return -2
state = strm.inflateState
if (((state.wrap & 2)) == 0):
return -2
state.head = head
head.done = False
return 0
@staticmethod
def inflateSetDictionary(strm,dictionary):
    # Supply a preset dictionary after inflate() has requested one
    # (returned 2 / Z_NEED_DICT).  Mirrors zlib's inflateSetDictionary():
    #   -2 (Z_STREAM_ERROR) bad stream/state or wrong mode,
    #   -3 (Z_DATA_ERROR)   dictionary Adler-32 does not match state.check,
    #   -4 (Z_MEM_ERROR)    window update failed,
    #    0 on success.
    dictLength = dictionary.byteLength
    dictid = None
    if ((strm is None) or ((strm.inflateState is None))):
        return -2
    state = strm.inflateState
    # A dictionary is only acceptable while waiting in DICT mode (11);
    # raw streams (wrap == 0) may receive one at any time.
    if ((state.wrap != 0) and ((state.mode != 11))):
        return -2
    if (state.mode == 11):
        # Verify the dictionary id: Adler-32 of the dictionary must equal
        # the checksum recorded from the compressed stream.
        dictid = 1
        dictid = pako_zlib_Adler32.adler32(dictid,dictionary,dictLength,0)
        if (dictid != state.check):
            return -3
    # Copy the dictionary into the sliding window as if it were output.
    ret = pako_zlib_Inflate.updatewindow(strm,dictionary,dictLength,dictLength)
    if (ret != 0):
        state.mode = 31
        return -4
    state.havedict = True
    return 0
# Haxe runtime bookkeeping: register the class object under its
# fully-qualified Haxe name so reflection (Type.resolveClass etc.) works.
pako_zlib_Inflate._hx_class = pako_zlib_Inflate
_hx_classes["pako.zlib.Inflate"] = pako_zlib_Inflate
class pako_zlib_InflateState:
    """Decompression state for pako.zlib.Inflate — a port of zlib's
    ``inflate_state`` struct (Haxe-generated code)."""
    _hx_class_name = "pako.zlib.InflateState"
    _hx_is_interface = "False"
    __slots__ = ("mode", "last", "wrap", "havedict", "flags", "dmax", "check", "total", "head", "wbits", "wsize", "whave", "wnext", "window", "hold", "bits", "length", "offset", "extra", "lencode", "distcode", "lenbits", "distbits", "ncode", "nlen", "ndist", "have", "lens", "work", "lendyn", "distdyn", "sane", "back", "was")
    _hx_fields = ["mode", "last", "wrap", "havedict", "flags", "dmax", "check", "total", "head", "wbits", "wsize", "whave", "wnext", "window", "hold", "bits", "length", "offset", "extra", "lencode", "distcode", "lenbits", "distbits", "ncode", "nlen", "ndist", "have", "lens", "work", "lendyn", "distdyn", "sane", "back", "was"]
    def __init__(self):
        # Haxe emits field initialisers in reverse declaration order.
        self.was = 0            # length recorded at match start (set from state.length)
        self.back = 0           # bits consumed for the current length/distance code
        self.sane = 0           # when non-zero, reject "distance too far back"
        self.distdyn = None     # backing store for the dynamic distance table
        self.lendyn = None      # backing store for the dynamic length/literal table
        size = 576
        this3 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(size),0,size)
        this1 = this3
        # scratch buffer handed to inflate_table (576 bytes;
        # presumably 288 x 16-bit entries — TODO confirm against InfTrees)
        self.work = this1
        size = 640
        this3 = haxe_io_ArrayBufferViewImpl(haxe_io_Bytes.alloc(size),0,size)
        this1 = this3
        # code lengths: 320 16-bit entries stored in a 640-byte view
        # (inflate indexes it as byteLength >> 1 two-byte slots)
        self.lens = this1
        self.have = 0           # number of code lengths gathered so far
        self.ndist = 0          # number of distance codes
        self.nlen = 0           # number of length/literal codes
        self.ncode = 0          # number of code-length code lengths
        self.distbits = 0       # index bits for distcode
        self.lenbits = 0        # index bits for lencode
        self.distcode = None    # current distance decode table
        self.lencode = None     # current length/literal decode table
        self.extra = 0          # extra bits needed for a length/distance
        self.offset = 0         # distance back to copy the match from
        self.length = 0         # literal byte, or length of match to copy
        self.bits = 0           # number of valid bits in the `hold` accumulator
        self.hold = 0           # input bit accumulator
        self.window = None      # sliding output window (allocated lazily)
        self.wnext = 0          # window write index
        self.whave = 0          # valid bytes in the window
        self.wsize = 0          # window size in bytes
        self.wbits = 0          # log2 of window size (from the stream header)
        self.head = None        # optional gzip header sink (see inflateGetHeader)
        self.total = 0          # total uncompressed bytes produced
        self.check = 0          # running Adler-32 (zlib) or CRC-32 (gzip) of output
        self.dmax = 0           # maximum distance allowed (zlib conformance check)
        self.flags = 0          # gzip header flags; 0 means a zlib stream
        self.havedict = False   # True once a preset dictionary was supplied
        self.wrap = 0           # bit 0: zlib wrapper allowed, bit 1: gzip wrapper
        self.last = False       # True while processing the final block
        self.mode = 0           # current step of the inflate state machine
    @staticmethod
    def _hx_empty_init(_hx_o):
        # Reflection helper (presumably for Type.createEmptyInstance):
        # blank every declared field without running __init__.
        _hx_o.mode = None
        _hx_o.last = None
        _hx_o.wrap = None
        _hx_o.havedict = None
        _hx_o.flags = None
        _hx_o.dmax = None
        _hx_o.check = None
        _hx_o.total = None
        _hx_o.head = None
        _hx_o.wbits = None
        _hx_o.wsize = None
        _hx_o.whave = None
        _hx_o.wnext = None
        _hx_o.window = None
        _hx_o.hold = None
        _hx_o.bits = None
        _hx_o.length = None
        _hx_o.offset = None
        _hx_o.extra = None
        _hx_o.lencode = None
        _hx_o.distcode = None
        _hx_o.lenbits = None
        _hx_o.distbits = None
        _hx_o.ncode = None
        _hx_o.nlen = None
        _hx_o.ndist = None
        _hx_o.have = None
        _hx_o.lens = None
        _hx_o.work = None
        _hx_o.lendyn = None
        _hx_o.distdyn = None
        _hx_o.sane = None
        _hx_o.back = None
        _hx_o.was = None
# Haxe runtime bookkeeping: register the class for reflection.
pako_zlib_InflateState._hx_class = pako_zlib_InflateState
_hx_classes["pako.zlib.InflateState"] = pako_zlib_InflateState
class pako_zlib_Messages:
    """Maps zlib status/error codes to human-readable message strings."""
    _hx_class_name = "pako.zlib.Messages"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["map", "get"]
    @staticmethod
    def get(error):
        # `map` is a Haxe IntMap populated elsewhere; `.h` is its backing
        # Python dict.  Unknown codes render as "ERROR: null".
        return ("ERROR: " + HxOverrides.stringOrNull(pako_zlib_Messages.map.h.get(error,None)))
# Haxe runtime bookkeeping: register the class for reflection.
pako_zlib_Messages._hx_class = pako_zlib_Messages
_hx_classes["pako.zlib.Messages"] = pako_zlib_Messages
class pako_zlib_ZStream:
    """Port of zlib's ``z_stream``: the buffers, offsets, running totals,
    status message and checksum shared between the caller and the
    inflate engine (Haxe-generated code)."""
    _hx_class_name = "pako.zlib.ZStream"
    _hx_is_interface = "False"
    __slots__ = ("input", "next_in", "avail_in", "total_in", "output", "next_out", "avail_out", "total_out", "msg", "inflateState", "data_type", "adler")
    _hx_fields = ["input", "next_in", "avail_in", "total_in", "output", "next_out", "avail_out", "total_out", "msg", "inflateState", "data_type", "adler"]
    def __init__(self):
        # Input side: buffer, read offset, bytes available, running total.
        self.input = None
        self.next_in = 0
        self.avail_in = 0
        self.total_in = 0
        # Output side: buffer, write offset, space available, running total.
        self.output = None
        self.next_out = 0
        self.avail_out = 0
        self.total_out = 0
        # Diagnostics and engine state.
        self.msg = ""
        self.inflateState = None
        self.data_type = 2
        self.adler = 0
    @staticmethod
    def _hx_empty_init(_hx_o):
        # Reflection helper: blank every declared field without __init__.
        for fld in pako_zlib_ZStream._hx_fields:
            setattr(_hx_o, fld, None)
# Haxe runtime bookkeeping: register the class for reflection.
pako_zlib_ZStream._hx_class = pako_zlib_ZStream
_hx_classes["pako.zlib.ZStream"] = pako_zlib_ZStream
class python_Boot:
    """Haxe/Python runtime support: reflection, mangled-field access and
    string conversion helpers used throughout the generated code."""
    _hx_class_name = "python.Boot"
    _hx_is_interface = "False"
    __slots__ = ()
    # All members are static; declared here for Haxe reflection.
    _hx_statics = ["keywords", "toString1", "fields", "simpleField", "hasField", "field", "getInstanceFields", "getSuperClass", "getClassFields", "prefixLength", "unhandleKeywords"]
@staticmethod
def toString1(o,s):
    # Recursive Std.string() implementation.  `s` accumulates one "\t"
    # per nesting level; recursion is cut off at depth 5 with "<...>".
    if (o is None):
        return "null"
    if isinstance(o,str):
        return o
    if (s is None):
        s = ""
    if (len(s) >= 5):
        return "<...>"
    if isinstance(o,bool):
        if o:
            return "true"
        else:
            return "false"
    # ints print verbatim (bool is excluded since it subclasses int)
    if (isinstance(o,int) and (not isinstance(o,bool))):
        return str(o)
    if isinstance(o,float):
        try:
            # whole-valued floats print without a fractional part
            if (o == int(o)):
                return str(Math.floor((o + 0.5)))
            else:
                return str(o)
        except BaseException as _g:
            None
            # int(o) can raise (e.g. inf/nan); fall back to str()
            return str(o)
    if isinstance(o,list):
        # Haxe Array: "[e0,e1,...]", elements rendered one level deeper
        o1 = o
        l = len(o1)
        st = "["
        s = (("null" if s is None else s) + "\t")
        _g = 0
        _g1 = l
        while (_g < _g1):
            i = _g
            _g = (_g + 1)
            prefix = ""
            if (i > 0):
                prefix = ","
            st = (("null" if st is None else st) + HxOverrides.stringOrNull(((("null" if prefix is None else prefix) + HxOverrides.stringOrNull(python_Boot.toString1((o1[i] if i >= 0 and i < len(o1) else None),s))))))
        st = (("null" if st is None else st) + "]")
        return st
    # a user-defined toString() wins over everything below
    try:
        if hasattr(o,"toString"):
            return o.toString()
    except BaseException as _g:
        None
    if hasattr(o,"__class__"):
        # anonymous structure: "{ field : value, ... }"
        if isinstance(o,_hx_AnonObject):
            toStr = None
            try:
                fields = python_Boot.fields(o)
                _g = []
                _g1 = 0
                while (_g1 < len(fields)):
                    f = (fields[_g1] if _g1 >= 0 and _g1 < len(fields) else None)
                    _g1 = (_g1 + 1)
                    x = ((("" + ("null" if f is None else f)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f),(("null" if s is None else s) + "\t"))))
                    _g.append(x)
                fieldsStr = _g
                toStr = (("{ " + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr]))) + " }")
            except BaseException as _g:
                None
                return "{ ... }"
            if (toStr is None):
                return "{ ... }"
            else:
                return toStr
        # Haxe enum value: "Tag(p1,p2)" or bare "Tag" when parameterless
        if isinstance(o,Enum):
            o1 = o
            l = len(o1.params)
            hasParams = (l > 0)
            if hasParams:
                paramsStr = ""
                _g = 0
                _g1 = l
                while (_g < _g1):
                    i = _g
                    _g = (_g + 1)
                    prefix = ""
                    if (i > 0):
                        prefix = ","
                    paramsStr = (("null" if paramsStr is None else paramsStr) + HxOverrides.stringOrNull(((("null" if prefix is None else prefix) + HxOverrides.stringOrNull(python_Boot.toString1(o1.params[i],s))))))
                return (((HxOverrides.stringOrNull(o1.tag) + "(") + ("null" if paramsStr is None else paramsStr)) + ")")
            else:
                return o1.tag
        # Haxe class instance or the class object itself
        if hasattr(o,"_hx_class_name"):
            if (o.__class__.__name__ != "type"):
                # instance: "ClassName( field : value, ... )"
                fields = python_Boot.getInstanceFields(o)
                _g = []
                _g1 = 0
                while (_g1 < len(fields)):
                    f = (fields[_g1] if _g1 >= 0 and _g1 < len(fields) else None)
                    _g1 = (_g1 + 1)
                    x = ((("" + ("null" if f is None else f)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f),(("null" if s is None else s) + "\t"))))
                    _g.append(x)
                fieldsStr = _g
                toStr = (((HxOverrides.stringOrNull(o._hx_class_name) + "( ") + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr]))) + " )")
                return toStr
            else:
                # class object: "#ClassName( static : value, ... )"
                fields = python_Boot.getClassFields(o)
                _g = []
                _g1 = 0
                while (_g1 < len(fields)):
                    f = (fields[_g1] if _g1 >= 0 and _g1 < len(fields) else None)
                    _g1 = (_g1 + 1)
                    x = ((("" + ("null" if f is None else f)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f),(("null" if s is None else s) + "\t"))))
                    _g.append(x)
                fieldsStr = _g
                toStr = (((("#" + HxOverrides.stringOrNull(o._hx_class_name)) + "( ") + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr]))) + " )")
                return toStr
        if ((type(o) == type) and (o == str)):
            return "#String"
        if ((type(o) == type) and (o == list)):
            return "#Array"
        if callable(o):
            return "function"
        try:
            if hasattr(o,"__repr__"):
                return o.__repr__()
        except BaseException as _g:
            None
        # NOTE(review): __str__ is called with a list argument (Haxe
        # codegen quirk); plain Python objects would reject that, but the
        # __repr__ branch above normally returns first — confirm intent.
        if hasattr(o,"__str__"):
            return o.__str__([])
        if hasattr(o,"__name__"):
            return o.__name__
        return "???"
    else:
        return str(o)
@staticmethod
def fields(o):
    """Return the reflectable field names of `o` as a list of strings.

    Haxe-generated instances expose an explicit `_hx_fields` list; anonymous
    objects expose their __dict__ keys (with keyword mangling undone); any
    other object falls back to raw __dict__ keys. Returns [] for None.
    """
    result = []
    if o is None:
        return result
    # Haxe classes declare their instance fields explicitly.
    if hasattr(o, "_hx_fields"):
        declared = o._hx_fields
        if declared is not None:
            return list(declared)
    if isinstance(o, _hx_AnonObject):
        # Map mangled keyword names (e.g. "_hx_if") back to their Haxe
        # spelling; the internal getattr-guard flag is not a real field.
        unhandle = python_Boot.unhandleKeywords
        for key in o.__dict__.keys():
            if key != '_hx_disable_getattr':
                result.append(unhandle(key))
    elif hasattr(o, "__dict__"):
        result.extend(o.__dict__.keys())
    return result
@staticmethod
def simpleField(o, field):
    """Read attribute `field` from `o`, translating the Haxe field name to
    its (possibly keyword-mangled) Python spelling; None when absent."""
    if field is None:
        return None
    if field in python_Boot.keywords:
        # Python reserved words were emitted with a "_hx_" prefix.
        name = "_hx_" + field
    elif len(field) > 2 and ord(field[0]) == 95 and ord(field[1]) == 95 and ord(field[len(field) - 1]) != 95:
        # Dunder-like names ("__x" but not "__x_") are also mangled.
        name = "_hx_" + field
    else:
        name = field
    return getattr(o, name) if hasattr(o, name) else None
@staticmethod
def hasField(o, field):
    """True if `o` exposes Haxe field `field` (honouring name mangling)."""
    if isinstance(o, _hx_AnonObject):
        # Anonymous objects implement their own guarded hasattr.
        return o._hx_hasattr(field)
    if field in python_Boot.keywords:
        name = "_hx_" + field
    elif len(field) > 2 and ord(field[0]) == 95 and ord(field[1]) == 95 and ord(field[len(field) - 1]) != 95:
        name = "_hx_" + field
    else:
        name = field
    return hasattr(o, name)
@staticmethod
def field(o, field):
    """Reflective field access: return the value of `field` on `o`.

    Native strings and lists get the Haxe String/Array API emulated by
    wrapping the static implementations in method closures ("length" is
    answered directly with len). Every other lookup — including unknown
    names on strings/lists — falls back to keyword-mangled attribute
    access. Returns None when the field does not exist.
    """
    if field is None:
        return None
    if isinstance(o, str):
        if field == "length":
            return len(o)
        # Haxe String API, dispatched by name (replaces the generated
        # length-of-name switch; same members, same results).
        string_api = {
            "split": HxString.split,
            "charCodeAt": HxString.charCodeAt,
            "charAt": HxString.charAt,
            "lastIndexOf": HxString.lastIndexOf,
            "toUpperCase": HxString.toUpperCase,
            "toLowerCase": HxString.toLowerCase,
            "indexOf": HxString.indexOf,
            "toString": HxString.toString,
            "substring": HxString.substring,
            "substr": HxString.substr,
        }
        method = string_api.get(field)
        if method is not None:
            return python_internal_MethodClosure(o, method)
    elif isinstance(o, list):
        if field == "length":
            return len(o)
        # Haxe Array API, dispatched by name.
        array_api = {
            "concat": python_internal_ArrayImpl.concat,
            "copy": python_internal_ArrayImpl.copy,
            "iterator": python_internal_ArrayImpl.iterator,
            "keyValueIterator": python_internal_ArrayImpl.keyValueIterator,
            "indexOf": python_internal_ArrayImpl.indexOf,
            "lastIndexOf": python_internal_ArrayImpl.lastIndexOf,
            "join": python_internal_ArrayImpl.join,
            "toString": python_internal_ArrayImpl.toString,
            "pop": python_internal_ArrayImpl.pop,
            "push": python_internal_ArrayImpl.push,
            "unshift": python_internal_ArrayImpl.unshift,
            "remove": python_internal_ArrayImpl.remove,
            "contains": python_internal_ArrayImpl.contains,
            "shift": python_internal_ArrayImpl.shift,
            "slice": python_internal_ArrayImpl.slice,
            "sort": python_internal_ArrayImpl.sort,
            "splice": python_internal_ArrayImpl.splice,
            "map": python_internal_ArrayImpl.map,
            "filter": python_internal_ArrayImpl.filter,
            "insert": python_internal_ArrayImpl.insert,
            "reverse": python_internal_ArrayImpl.reverse,
        }
        method = array_api.get(field)
        if method is not None:
            return python_internal_MethodClosure(o, method)
    # Fallback: plain attribute access with Haxe keyword/dunder mangling.
    if field in python_Boot.keywords:
        name = "_hx_" + field
    elif len(field) > 2 and ord(field[0]) == 95 and ord(field[1]) == 95 and ord(field[len(field) - 1]) != 95:
        name = "_hx_" + field
    else:
        name = field
    return getattr(o, name) if hasattr(o, name) else None
@staticmethod
def getInstanceFields(c):
    """List instance fields and methods of class `c`, including those of
    its Haxe super-classes, without duplicates (super-class order first)."""
    own = list(c._hx_fields) if hasattr(c, "_hx_fields") else []
    if hasattr(c, "_hx_methods"):
        own = own + c._hx_methods
    parent = python_Boot.getSuperClass(c)
    if parent is None:
        return own
    # Inherited names first, then this class's names that are new.
    inherited = python_Boot.getInstanceFields(parent)
    seen = set(inherited)
    for name in own:
        if name not in seen:
            inherited.append(name)
    return inherited
@staticmethod
def getSuperClass(c):
if (c is None):
return None
try:
if hasattr(c,"_hx_super"):
return c._hx_super
return None
except BaseException as _g:
None
return None
@staticmethod
def getClassFields(c):
if hasattr(c,"_hx_statics"):
x = c._hx_statics
return list(x)
else:
return []
@staticmethod
def unhandleKeywords(name):
    """Undo Haxe keyword mangling: strip the "_hx_" prefix when the
    remainder is a reserved keyword; otherwise return `name` unchanged."""
    if HxString.substr(name, 0, python_Boot.prefixLength) == "_hx_":
        stripped = HxString.substr(name, python_Boot.prefixLength, None)
        if stripped in python_Boot.keywords:
            return stripped
    return name
# Register python.Boot in the global Haxe class registry.
python_Boot._hx_class = python_Boot
_hx_classes["python.Boot"] = python_Boot
class python_HaxeIterator:
    """Adapts a native Python iterator to the Haxe Iterator protocol
    (hasNext/next) by buffering one element ahead."""
    _hx_class_name = "python.HaxeIterator"
    _hx_is_interface = "False"
    __slots__ = ("it", "x", "has", "checked")
    _hx_fields = ["it", "x", "has", "checked"]
    _hx_methods = ["next", "hasNext"]

    def __init__(self,it):
        # checked: whether `x`/`has` currently hold a buffered look-ahead.
        self.checked = False
        self.has = False
        self.x = None
        self.it = it

    def next(self):
        # Pull the next element unless hasNext() already buffered it.
        if (not self.checked):
            self.hasNext()
        self.checked = False
        return self.x

    def hasNext(self):
        if (not self.checked):
            try:
                self.x = self.it.__next__()
                self.has = True
            except BaseException as _g:
                None
                # Only StopIteration means exhaustion; anything else propagates.
                if Std.isOfType(haxe_Exception.caught(_g).unwrap(),StopIteration):
                    self.has = False
                    self.x = None
                else:
                    raise _g
            self.checked = True
        return self.has

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.it = None
        _hx_o.x = None
        _hx_o.has = None
        _hx_o.checked = None


# Register python.HaxeIterator in the global Haxe class registry.
python_HaxeIterator._hx_class = python_HaxeIterator
_hx_classes["python.HaxeIterator"] = python_HaxeIterator
class python__KwArgs_KwArgs_Impl_:
    """Implementation class for Haxe's python.KwArgs abstract."""
    _hx_class_name = "python._KwArgs.KwArgs_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["fromT"]

    @staticmethod
    def fromT(d):
        # A KwArgs value is the anonymous object viewed as its live dict.
        return python_Lib.anonAsDict(d)


python__KwArgs_KwArgs_Impl_._hx_class = python__KwArgs_KwArgs_Impl_
_hx_classes["python._KwArgs.KwArgs_Impl_"] = python__KwArgs_KwArgs_Impl_
class python_Lib:
    """Runtime helpers backing Haxe's python.Lib API."""
    _hx_class_name = "python.Lib"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["lineEnd", "printString", "dictToAnon", "anonToDict", "anonAsDict"]

    @staticmethod
    def printString(_hx_str):
        """Write `_hx_str` to stdout as UTF-8, bypassing the text layer."""
        encoding = "utf-8"
        if encoding is None:
            encoding = "utf-8"
        stdout = python_lib_Sys.stdout
        stdout.buffer.write(_hx_str.encode(encoding, "strict"))
        stdout.flush()

    @staticmethod
    def dictToAnon(v):
        """New anonymous object built from a shallow copy of dict `v`."""
        return _hx_AnonObject(v.copy())

    @staticmethod
    def anonToDict(o):
        """Snapshot an anonymous object as a new dict; None if not anon."""
        if not isinstance(o, _hx_AnonObject):
            return None
        return o.__dict__.copy()

    @staticmethod
    def anonAsDict(o):
        """Expose the *live* backing dict of an anonymous object; None if not anon."""
        if not isinstance(o, _hx_AnonObject):
            return None
        return o.__dict__


python_Lib._hx_class = python_Lib
_hx_classes["python.Lib"] = python_Lib
class python_internal_ArrayImpl:
    """Static implementations of the Haxe Array API over native lists."""
    _hx_class_name = "python.internal.ArrayImpl"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["concat", "copy", "iterator", "keyValueIterator", "indexOf", "lastIndexOf", "join", "toString", "pop", "push", "unshift", "remove", "contains", "shift", "slice", "sort", "splice", "map", "filter", "insert", "reverse", "_get", "_set"]

    @staticmethod
    def concat(a1, a2):
        """New list: a1's elements followed by a2's."""
        return a1 + a2

    @staticmethod
    def copy(x):
        """Shallow copy."""
        return list(x)

    @staticmethod
    def iterator(x):
        """Haxe iterator over the list."""
        return python_HaxeIterator(iter(x))

    @staticmethod
    def keyValueIterator(x):
        """Haxe (index, value) iterator over the list."""
        return haxe_iterators_ArrayKeyValueIterator(x)

    @staticmethod
    def indexOf(a, x, fromIndex = None):
        """First index of `x` at/after `fromIndex` (Haxe equality), or -1.
        Negative `fromIndex` counts from the end, clamped at 0."""
        n = len(a)
        if fromIndex is None:
            start = 0
        elif fromIndex < 0:
            start = n + fromIndex
        else:
            start = fromIndex
        if start < 0:
            start = 0
        for i in range(start, n):
            if HxOverrides.eq(a[i], x):
                return i
        return -1

    @staticmethod
    def lastIndexOf(a, x, fromIndex = None):
        """Last index of `x` at/before `fromIndex` (Haxe equality), or -1."""
        n = len(a)
        if fromIndex is None:
            stop = n
        elif fromIndex < 0:
            stop = n + fromIndex + 1
        else:
            stop = fromIndex + 1
        if stop > n:
            stop = n
        for i in range(stop - 1, -1, -1):
            if HxOverrides.eq(a[i], x):
                return i
        return -1

    @staticmethod
    def join(x, sep):
        """Join Haxe string renderings of the elements with `sep`."""
        return sep.join([python_Boot.toString1(e, '') for e in x])

    @staticmethod
    def toString(x):
        """Haxe-style "[a,b,c]" rendering."""
        return "[" + HxOverrides.stringOrNull(",".join([python_Boot.toString1(e, '') for e in x])) + "]"

    @staticmethod
    def pop(x):
        """Remove and return the last element, or None when empty."""
        return x.pop() if x else None

    @staticmethod
    def push(x, e):
        """Append `e`; return the new length."""
        x.append(e)
        return len(x)

    @staticmethod
    def unshift(x, e):
        """Prepend `e`."""
        x.insert(0, e)

    @staticmethod
    def remove(x, e):
        """Remove first occurrence of `e`; True if something was removed."""
        try:
            x.remove(e)
        except BaseException:
            return False
        return True

    @staticmethod
    def contains(x, e):
        """Membership test (native equality)."""
        return e in x

    @staticmethod
    def shift(x):
        """Remove and return the first element, or None when empty."""
        return x.pop(0) if x else None

    @staticmethod
    def slice(x, pos, end = None):
        """Sub-list [pos:end] with Python slice semantics."""
        return x[pos:end]

    @staticmethod
    def sort(x, f):
        """In-place sort using Haxe comparison function `f`."""
        x.sort(key=python_lib_Functools.cmp_to_key(f))

    @staticmethod
    def splice(x, pos, _hx_len):
        """Remove `_hx_len` elements starting at `pos` (negative `pos`
        counts from the end, clamped at 0); return the removed slice."""
        if pos < 0:
            pos = max(0, len(x) + pos)
        removed = x[pos:pos + _hx_len]
        del x[pos:pos + _hx_len]
        return removed

    @staticmethod
    def map(x, f):
        """New list of f(e) for each element."""
        return [f(e) for e in x]

    @staticmethod
    def filter(x, f):
        """New list of elements for which f(e) is truthy."""
        return [e for e in x if f(e)]

    @staticmethod
    def insert(a, pos, x):
        """Insert `x` at index `pos` (list.insert semantics)."""
        a.insert(pos, x)

    @staticmethod
    def reverse(a):
        """In-place reversal."""
        a.reverse()

    @staticmethod
    def _get(x, idx):
        """Bounds-checked read: element at `idx`, or None when out of range."""
        return x[idx] if 0 <= idx < len(x) else None

    @staticmethod
    def _set(x, idx, v):
        """Write `v` at `idx`, padding with None past the end; returns `v`."""
        while len(x) < idx:
            x.append(None)
        if len(x) == idx:
            x.append(v)
        else:
            x[idx] = v
        return v
# Register python.internal.ArrayImpl in the global Haxe class registry.
python_internal_ArrayImpl._hx_class = python_internal_ArrayImpl
_hx_classes["python.internal.ArrayImpl"] = python_internal_ArrayImpl
class HxOverrides:
    """Helpers the Haxe compiler emits for operations whose semantics
    differ between Haxe and native Python."""
    _hx_class_name = "HxOverrides"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["iterator", "eq", "stringOrNull", "pop", "toLowerCase", "rshift", "modf", "mod", "mapKwArgs"]

    @staticmethod
    def iterator(x):
        """Haxe iterator for either a native list or an object with iterator()."""
        if isinstance(x, list):
            return haxe_iterators_ArrayIterator(x)
        return x.iterator()

    @staticmethod
    def eq(a, b):
        """Haxe equality: arrays compare by identity, everything else by value."""
        if isinstance(a, list) or isinstance(b, list):
            return a is b
        return a == b

    @staticmethod
    def stringOrNull(s):
        """Render None as the string "null" (Haxe string conversion)."""
        return "null" if s is None else s

    @staticmethod
    def pop(x):
        """pop() dispatch: empty native lists yield None instead of raising."""
        if isinstance(x, list):
            return x.pop() if x else None
        return x.pop()

    @staticmethod
    def toLowerCase(x):
        """toLowerCase() dispatch for native str vs. Haxe objects."""
        return x.lower() if isinstance(x, str) else x.toLowerCase()

    @staticmethod
    def rshift(val, n):
        """Unsigned 32-bit right shift (Haxe `>>>`)."""
        return (val % 0x100000000) >> n

    @staticmethod
    def modf(a, b):
        """Float modulo with C-style (truncation) sign rules; NaN when b == 0."""
        if b == 0.0:
            return float('nan')
        if a < 0:
            return -(-a % -b) if b < 0 else -(-a % b)
        if b < 0:
            return a % -b
        return a % b

    @staticmethod
    def mod(a, b):
        """Integer modulo with C-style (truncation) sign rules."""
        if a < 0:
            return -(-a % -b) if b < 0 else -(-a % b)
        if b < 0:
            return a % -b
        return a % b

    @staticmethod
    def mapKwArgs(a, v):
        """Return a copy of anon object `a` with attributes renamed per the
        old-name -> new-name mapping `v`."""
        renamed = _hx_AnonObject(python_Lib.anonToDict(a))
        it = python_HaxeIterator(iter(v.keys()))
        while it.hasNext():
            old = it.next()
            new = v.get(old)
            if renamed._hx_hasattr(old):
                value = getattr(renamed, old)
                setattr(renamed, new, value)
                delattr(renamed, old)
        return renamed
# Register HxOverrides in the global Haxe class registry.
HxOverrides._hx_class = HxOverrides
_hx_classes["HxOverrides"] = HxOverrides
class python_internal_MethodClosure:
    """Binds a receiver to an unbound implementation function, emulating a
    Haxe instance-method closure."""
    _hx_class_name = "python.internal.MethodClosure"
    _hx_is_interface = "False"
    __slots__ = ("obj", "func")
    _hx_fields = ["obj", "func"]
    _hx_methods = ["__call__"]

    def __init__(self, obj, func):
        self.obj = obj
        self.func = func

    def __call__(self, *args):
        # Invoke with the bound receiver first, like a bound method.
        return self.func(self.obj, *args)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.obj = None
        _hx_o.func = None
# Register python.internal.MethodClosure in the global Haxe class registry.
python_internal_MethodClosure._hx_class = python_internal_MethodClosure
_hx_classes["python.internal.MethodClosure"] = python_internal_MethodClosure
class HxString:
    """Static implementations of the Haxe String API over native str,
    reproducing JS-style index/clamp edge semantics."""
    _hx_class_name = "HxString"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["split", "charCodeAt", "charAt", "lastIndexOf", "toUpperCase", "toLowerCase", "indexOf", "indexOfImpl", "toString", "substring", "substr"]

    @staticmethod
    def split(s, d):
        """Split on `d`; an empty delimiter splits into single characters."""
        return list(s) if d == "" else s.split(d)

    @staticmethod
    def charCodeAt(s, index):
        """Code point at `index`, or None when s is None/empty or out of range."""
        if s is None or len(s) == 0 or index < 0 or index >= len(s):
            return None
        return ord(s[index])

    @staticmethod
    def charAt(s, index):
        """One-character string at `index`, or "" when out of range."""
        if 0 <= index < len(s):
            return s[index]
        return ""

    @staticmethod
    def lastIndexOf(s, _hx_str, startIndex = None):
        """Index of the last occurrence of `_hx_str` starting at or before
        `startIndex`, or -1 (JS String.lastIndexOf semantics)."""
        if startIndex is None:
            return s.rfind(_hx_str, 0, len(s))
        if _hx_str == "":
            # Empty needle: result is startIndex clamped into [0, len(s)].
            n = len(s)
            if startIndex < 0:
                startIndex = n + startIndex
            if startIndex < 0:
                startIndex = 0
            return n if startIndex > n else startIndex
        i = s.rfind(_hx_str, 0, startIndex + 1)
        # rfind's end bound rejects matches that merely *start* by
        # startIndex but extend past it; probe once more to catch those.
        startLeft = max(0, (startIndex + 1) - len(_hx_str)) if i == -1 else i + 1
        check = s.find(_hx_str, startLeft, len(s))
        if check > i and check <= startIndex:
            return check
        return i

    @staticmethod
    def toUpperCase(s):
        """Upper-cased copy."""
        return s.upper()

    @staticmethod
    def toLowerCase(s):
        """Lower-cased copy."""
        return s.lower()

    @staticmethod
    def indexOf(s, _hx_str, startIndex = None):
        """Index of the first occurrence at or after `startIndex`, or -1."""
        if startIndex is None:
            return s.find(_hx_str)
        return HxString.indexOfImpl(s, _hx_str, startIndex)

    @staticmethod
    def indexOfImpl(s, _hx_str, startIndex):
        if _hx_str == "":
            # Empty needle: result is startIndex clamped into [0, len(s)].
            n = len(s)
            if startIndex < 0:
                startIndex = n + startIndex
            if startIndex < 0:
                startIndex = 0
            return n if startIndex > n else startIndex
        return s.find(_hx_str, startIndex)

    @staticmethod
    def toString(s):
        """Identity: a Haxe String is already a native str."""
        return s

    @staticmethod
    def substring(s, startIndex, endIndex = None):
        """JS-style substring: negative bounds clamp to 0, swapped bounds flip."""
        if startIndex < 0:
            startIndex = 0
        if endIndex is None:
            return s[startIndex:]
        if endIndex < 0:
            endIndex = 0
        if endIndex < startIndex:
            startIndex, endIndex = endIndex, startIndex
        return s[startIndex:endIndex]

    @staticmethod
    def substr(s, startIndex, _hx_len = None):
        """JS-style substr: negative start counts back from the end
        (clamped at 0); `_hx_len` limits the result length."""
        if _hx_len is None:
            return s[startIndex:]
        if _hx_len == 0:
            return ""
        if startIndex < 0:
            startIndex = max(0, len(s) + startIndex)
        return s[startIndex:startIndex + _hx_len]
# Register HxString in the global Haxe class registry.
HxString._hx_class = HxString
_hx_classes["HxString"] = HxString
class sys_net_Socket:
    """Native TCP socket wrapper exposing Haxe input/output streams."""
    _hx_class_name = "sys.net.Socket"
    _hx_is_interface = "False"
    __slots__ = ("_hx___s", "input", "output")
    _hx_fields = ["__s", "input", "output"]
    _hx_methods = ["__initSocket", "fileno"]

    def __init__(self):
        self.output = None
        self.input = None
        self._hx___s = None
        # Subclasses override __initSocket to customise the native socket
        # (e.g. SSL wrapping) before the streams are attached.
        self._hx___initSocket()
        self.input = sys_net__Socket_SocketInput(self._hx___s)
        self.output = sys_net__Socket_SocketOutput(self._hx___s)

    def _hx___initSocket(self):
        # Plain (unencrypted) native socket.
        self._hx___s = python_lib_socket_Socket()

    def fileno(self):
        # Underlying descriptor, e.g. for select()/poll().
        return self._hx___s.fileno()

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._hx___s = None
        _hx_o.input = None
        _hx_o.output = None


# Register sys.net.Socket in the global Haxe class registry.
sys_net_Socket._hx_class = sys_net_Socket
_hx_classes["sys.net.Socket"] = sys_net_Socket
class python_net_SslSocket(sys_net_Socket):
    """TLS socket: wraps the native socket in an SSL context with
    certificate verification against the system default trust store."""
    _hx_class_name = "python.net.SslSocket"
    _hx_is_interface = "False"
    __slots__ = ("hostName",)
    _hx_fields = ["hostName"]
    _hx_methods = ["__initSocket"]
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = sys_net_Socket

    def __init__(self):
        # hostName is used as server_hostname for SNI / hostname checks below.
        self.hostName = None
        super().__init__()

    def _hx___initSocket(self):
        # NOTE(review): PROTOCOL_SSLv23 is the legacy "negotiate best
        # protocol" constant (a deprecated alias of PROTOCOL_TLS in modern
        # Python) — confirm against the targeted Python versions.
        context = python_lib_ssl_SSLContext(python_lib_Ssl.PROTOCOL_SSLv23)
        context.verify_mode = python_lib_Ssl.CERT_REQUIRED
        context.set_default_verify_paths()
        # Disable insecure protocol versions and TLS compression.
        context.options = (context.options | python_lib_Ssl.OP_NO_SSLv2)
        context.options = (context.options | python_lib_Ssl.OP_NO_SSLv3)
        context.options = (context.options | python_lib_Ssl.OP_NO_COMPRESSION)
        context.options = (context.options | python_lib_Ssl.OP_NO_TLSv1)
        self._hx___s = python_lib_socket_Socket()
        # Positional args: server_side=False, do_handshake_on_connect=True,
        # suppress_ragged_eofs=True, server_hostname=self.hostName.
        self._hx___s = context.wrap_socket(self._hx___s,False,True,True,self.hostName)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.hostName = None


# Register python.net.SslSocket in the global Haxe class registry.
python_net_SslSocket._hx_class = python_net_SslSocket
_hx_classes["python.net.SslSocket"] = python_net_SslSocket
class sys_io_File:
    """Whole-file text helpers (UTF-8; newline translation disabled)."""
    _hx_class_name = "sys.io.File"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["getContent", "saveContent"]

    @staticmethod
    def getContent(path):
        """Read and return the entire file at `path` as UTF-8 text.

        Fix: the handle is now closed via a context manager even when
        read() raises (the original leaked it on error).
        newline="" keeps line endings exactly as stored on disk.
        """
        with open(path, "r", -1, "utf-8", None, "") as handle:
            return handle.read()

    @staticmethod
    def saveContent(path, content):
        """Overwrite the file at `path` with `content`, encoded as UTF-8.

        Fix: the handle is closed via a context manager even when
        write() raises (the original leaked it on error).
        """
        with open(path, "w", -1, "utf-8", None, "") as handle:
            handle.write(content)
# Register sys.io.File in the global Haxe class registry.
sys_io_File._hx_class = sys_io_File
_hx_classes["sys.io.File"] = sys_io_File
class sys_net__Socket_SocketInput(haxe_io_Input):
    """haxe.io.Input backed by a native socket (read side).
    Read methods are inherited from haxe_io_Input."""
    _hx_class_name = "sys.net._Socket.SocketInput"
    _hx_is_interface = "False"
    __slots__ = ("_hx___s",)
    _hx_fields = ["__s"]
    _hx_methods = []
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = haxe_io_Input

    def __init__(self,s):
        # s: the native socket to read from.
        self._hx___s = s

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._hx___s = None


# Register sys.net._Socket.SocketInput in the global Haxe class registry.
sys_net__Socket_SocketInput._hx_class = sys_net__Socket_SocketInput
_hx_classes["sys.net._Socket.SocketInput"] = sys_net__Socket_SocketInput
class sys_net__Socket_SocketOutput(haxe_io_Output):
    """haxe.io.Output backed by a native socket (write side).
    Write methods are inherited from haxe_io_Output."""
    _hx_class_name = "sys.net._Socket.SocketOutput"
    _hx_is_interface = "False"
    __slots__ = ("_hx___s",)
    _hx_fields = ["__s"]
    _hx_methods = []
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = haxe_io_Output

    def __init__(self,s):
        # s: the native socket to write to.
        self._hx___s = s

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o._hx___s = None


# Register sys.net._Socket.SocketOutput in the global Haxe class registry.
sys_net__Socket_SocketOutput._hx_class = sys_net__Socket_SocketOutput
_hx_classes["sys.net._Socket.SocketOutput"] = sys_net__Socket_SocketOutput
class sys_thread_EventLoop:
    """Per-thread event loop running one-time events and repeating timers.

    `mutex` guards all shared state; `waitLock` is a semaphore used to wake
    the loop when events are added or promised; `regularEvents` is the head
    of a doubly linked list of repeating events, kept sorted by nextRunTime.
    """
    _hx_class_name = "sys.thread.EventLoop"
    _hx_is_interface = "False"
    __slots__ = ("mutex", "oneTimeEvents", "oneTimeEventsIdx", "waitLock", "promisedEventsCount", "regularEvents")
    _hx_fields = ["mutex", "oneTimeEvents", "oneTimeEventsIdx", "waitLock", "promisedEventsCount", "regularEvents"]
    _hx_methods = ["repeat", "cancel", "loop"]

    def __init__(self):
        self.regularEvents = None
        self.promisedEventsCount = 0
        self.waitLock = sys_thread_Lock()
        self.oneTimeEventsIdx = 0
        self.oneTimeEvents = list()
        self.mutex = sys_thread_Mutex()

    def repeat(self,event,intervalMs):
        """Schedule `event` to run every `intervalMs` ms; return its handle."""
        self.mutex.lock.acquire(True)
        interval = (0.001 * intervalMs)
        event1 = sys_thread__EventLoop_RegularEvent(event,(python_lib_Time.time() + interval),interval)
        _g = self.regularEvents
        if (_g is None):
            self.regularEvents = event1
        else:
            # Insert into the linked list at the position that keeps it
            # sorted by nextRunTime.
            current = _g
            previous = None
            while True:
                if (current is None):
                    # Reached the tail: append.
                    previous.next = event1
                    event1.previous = previous
                    break
                elif (event1.nextRunTime < current.nextRunTime):
                    # Link in front of `current`.
                    event1.next = current
                    current.previous = event1
                    if (previous is None):
                        self.regularEvents = event1
                    else:
                        event1.previous = previous
                        previous.next = event1
                        current.previous = event1
                    break
                else:
                    previous = current
                    current = current.next
        # Wake the loop so the new event is taken into account.
        self.waitLock.semaphore.release()
        self.mutex.lock.release()
        return event1

    def cancel(self,eventHandler):
        """Cancel a repeating event previously returned by repeat()."""
        self.mutex.lock.acquire(True)
        event = eventHandler
        event.cancelled = True
        if (self.regularEvents == event):
            self.regularEvents = event.next
        # Unlink from the doubly linked list.
        _g = event.next
        if (_g is not None):
            e = _g
            e.previous = event.previous
        _g = event.previous
        if (_g is not None):
            e = _g
            e.next = event.next
        self.mutex.lock.release()

    def loop(self):
        """Run the loop until no events remain pending or promised."""
        # Scratch buffers reused across iterations to avoid reallocation.
        recycleRegular = []
        recycleOneTimers = []
        while True:
            now = python_lib_Time.time()
            regularsToRun = recycleRegular
            eventsToRunIdx = 0
            # nextEventAt: -1 = nothing scheduled, -2 = run another pass
            # immediately, otherwise the absolute time of the nearest timer.
            nextEventAt = -1
            self.mutex.lock.acquire(True)
            # Drain pending wake-up tokens.
            while self.waitLock.semaphore.acquire(True,0.0):
                pass
            # Collect due regular events; advance their schedule in place.
            current = self.regularEvents
            while (current is not None):
                if (current.nextRunTime <= now):
                    tmp = eventsToRunIdx
                    eventsToRunIdx = (eventsToRunIdx + 1)
                    python_internal_ArrayImpl._set(regularsToRun, tmp, current)
                    current.nextRunTime = (current.nextRunTime + current.interval)
                    nextEventAt = -2
                elif ((nextEventAt == -1) or ((current.nextRunTime < nextEventAt))):
                    nextEventAt = current.nextRunTime
                current = current.next
            self.mutex.lock.release()
            # Run due regular events outside the lock, skipping cancelled ones.
            _g = 0
            _g1 = eventsToRunIdx
            while (_g < _g1):
                i = _g
                _g = (_g + 1)
                if (not (regularsToRun[i] if i >= 0 and i < len(regularsToRun) else None).cancelled):
                    (regularsToRun[i] if i >= 0 and i < len(regularsToRun) else None).run()
                python_internal_ArrayImpl._set(regularsToRun, i, None)
            eventsToRunIdx = 0
            oneTimersToRun = recycleOneTimers
            # Collect queued one-time events under the lock.
            self.mutex.lock.acquire(True)
            _g2_current = 0
            _g2_array = self.oneTimeEvents
            while (_g2_current < len(_g2_array)):
                _g3_value = (_g2_array[_g2_current] if _g2_current >= 0 and _g2_current < len(_g2_array) else None)
                _g3_key = _g2_current
                _g2_current = (_g2_current + 1)
                i1 = _g3_key
                event = _g3_value
                if (event is None):
                    # Slots past the fill index are None: queue exhausted.
                    break
                else:
                    tmp1 = eventsToRunIdx
                    eventsToRunIdx = (eventsToRunIdx + 1)
                    python_internal_ArrayImpl._set(oneTimersToRun, tmp1, event)
                    python_internal_ArrayImpl._set(self.oneTimeEvents, i1, None)
            self.oneTimeEventsIdx = 0
            hasPromisedEvents = (self.promisedEventsCount > 0)
            self.mutex.lock.release()
            # Run one-time events outside the lock.
            _g2 = 0
            _g3 = eventsToRunIdx
            while (_g2 < _g3):
                i2 = _g2
                _g2 = (_g2 + 1)
                (oneTimersToRun[i2] if i2 >= 0 and i2 < len(oneTimersToRun) else None)()
                python_internal_ArrayImpl._set(oneTimersToRun, i2, None)
            if (eventsToRunIdx > 0):
                nextEventAt = -2
            # Decide whether to spin again, block, sleep, or exit.
            r_nextEventAt = nextEventAt
            r_anyTime = hasPromisedEvents
            _g4 = r_anyTime
            _g5 = r_nextEventAt
            _g6 = _g5
            if (_g6 == -2):
                # Work was done this pass: loop again immediately.
                pass
            elif (_g6 == -1):
                if _g4:
                    # Nothing scheduled but events are promised: block until woken.
                    self.waitLock.semaphore.acquire(True,None)
                else:
                    # Nothing scheduled and nothing promised: loop is done.
                    break
            else:
                # Sleep until the nearest timer is due (timeout clamped at 0).
                time = _g5
                timeout = (time - python_lib_Time.time())
                _this = self.waitLock
                timeout1 = (0 if (python_lib_Math.isnan(0)) else (timeout if (python_lib_Math.isnan(timeout)) else max(0,timeout)))
                _this.semaphore.acquire(True,timeout1)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.mutex = None
        _hx_o.oneTimeEvents = None
        _hx_o.oneTimeEventsIdx = None
        _hx_o.waitLock = None
        _hx_o.promisedEventsCount = None
        _hx_o.regularEvents = None


# Register sys.thread.EventLoop in the global Haxe class registry.
sys_thread_EventLoop._hx_class = sys_thread_EventLoop
_hx_classes["sys.thread.EventLoop"] = sys_thread_EventLoop
class sys_thread__EventLoop_RegularEvent:
    """Node of the EventLoop's sorted doubly linked list of repeating events."""
    _hx_class_name = "sys.thread._EventLoop.RegularEvent"
    _hx_is_interface = "False"
    __slots__ = ("nextRunTime", "interval", "run", "next", "previous", "cancelled")
    _hx_fields = ["nextRunTime", "interval", "run", "next", "previous", "cancelled"]

    def __init__(self,run,nextRunTime,interval):
        self.previous = None
        self.next = None
        self.cancelled = False
        # run: the callback to execute; nextRunTime: absolute time of the
        # next run (seconds); interval: period between runs (seconds).
        self.run = run
        self.nextRunTime = nextRunTime
        self.interval = interval

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.nextRunTime = None
        _hx_o.interval = None
        _hx_o.run = None
        _hx_o.next = None
        _hx_o.previous = None
        _hx_o.cancelled = None


# Register sys.thread._EventLoop.RegularEvent in the global Haxe class registry.
sys_thread__EventLoop_RegularEvent._hx_class = sys_thread__EventLoop_RegularEvent
_hx_classes["sys.thread._EventLoop.RegularEvent"] = sys_thread__EventLoop_RegularEvent
class sys_thread_Lock:
    """Haxe Lock built on a native semaphore initialised to 0."""
    _hx_class_name = "sys.thread.Lock"
    _hx_is_interface = "False"
    __slots__ = ("semaphore",)
    _hx_fields = ["semaphore"]

    def __init__(self):
        # NOTE(review): `Lock` here is a runtime alias (presumably a native
        # semaphore type) bound elsewhere in this file — confirm the alias.
        self.semaphore = Lock(0)

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.semaphore = None


# Register sys.thread.Lock in the global Haxe class registry.
sys_thread_Lock._hx_class = sys_thread_Lock
_hx_classes["sys.thread.Lock"] = sys_thread_Lock
class sys_thread_Mutex:
    """Haxe Mutex built on a native re-entrant lock."""
    _hx_class_name = "sys.thread.Mutex"
    _hx_is_interface = "False"
    __slots__ = ("lock",)
    _hx_fields = ["lock"]

    def __init__(self):
        # Re-entrant so the same thread may acquire it recursively.
        self.lock = sys_thread__Mutex_NativeRLock()

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.lock = None


# Register sys.thread.Mutex in the global Haxe class registry.
sys_thread_Mutex._hx_class = sys_thread_Mutex
_hx_classes["sys.thread.Mutex"] = sys_thread_Mutex
class sys_thread_NoEventLoopException(haxe_Exception):
    """Raised when a thread without an event loop is asked for one."""
    _hx_class_name = "sys.thread.NoEventLoopException"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_fields = []
    _hx_methods = []
    _hx_statics = []
    _hx_interfaces = []
    _hx_super = haxe_Exception

    def __init__(self,msg = None,previous = None):
        # Default message mirrors the Haxe standard library wording.
        if (msg is None):
            msg = "Event loop is not available. Refer to sys.thread.Thread.runWithEventLoop."
        super().__init__(msg,previous)


# Register sys.thread.NoEventLoopException in the global Haxe class registry.
sys_thread_NoEventLoopException._hx_class = sys_thread_NoEventLoopException
_hx_classes["sys.thread.NoEventLoopException"] = sys_thread_NoEventLoopException
class sys_thread__Thread_Thread_Impl_:
    """Implementation class for the sys.thread.Thread abstract."""
    _hx_class_name = "sys.thread._Thread.Thread_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["get_events", "processEvents"]
    # Class-level placeholder emitted by the compiler.
    events = None

    @staticmethod
    def get_events(this1):
        """Return the thread's event loop; raise NoEventLoopException if absent."""
        if (this1.events is None):
            raise sys_thread_NoEventLoopException()
        return this1.events

    @staticmethod
    def processEvents():
        """Run the current thread's event loop until it has no more work."""
        sys_thread__Thread_HxThread.current().events.loop()


# Register sys.thread._Thread.Thread_Impl_ in the global Haxe class registry.
sys_thread__Thread_Thread_Impl_._hx_class = sys_thread__Thread_Thread_Impl_
_hx_classes["sys.thread._Thread.Thread_Impl_"] = sys_thread__Thread_Thread_Impl_
class sys_thread__Thread_HxThread:
    """Haxe-side wrapper around a native Python thread."""
    _hx_class_name = "sys.thread._Thread.HxThread"
    _hx_is_interface = "False"
    __slots__ = ("events", "nativeThread")
    _hx_fields = ["events", "nativeThread"]
    _hx_statics = ["threads", "threadsMutex", "mainThread", "current"]

    def __init__(self,t):
        self.events = None
        self.nativeThread = t

    # Static registry of wrappers keyed by native thread; the mutex guards
    # it and `mainThread` is the pre-built wrapper for the main thread.
    # All three are initialised elsewhere in the generated module.
    threads = None
    threadsMutex = None
    mainThread = None

    @staticmethod
    def current():
        """Return (creating on demand) the wrapper for the calling thread."""
        sys_thread__Thread_HxThread.threadsMutex.lock.acquire(True)
        ct = threading.current_thread()
        # The main thread has a dedicated pre-built wrapper.
        if (ct == threading.main_thread()):
            sys_thread__Thread_HxThread.threadsMutex.lock.release()
            return sys_thread__Thread_HxThread.mainThread
        if (not (ct in sys_thread__Thread_HxThread.threads.h)):
            sys_thread__Thread_HxThread.threads.set(ct,sys_thread__Thread_HxThread(ct))
        t = sys_thread__Thread_HxThread.threads.h.get(ct,None)
        sys_thread__Thread_HxThread.threadsMutex.lock.release()
        return t

    @staticmethod
    def _hx_empty_init(_hx_o):
        _hx_o.events = None
        _hx_o.nativeThread = None


# Register sys.thread._Thread.HxThread in the global Haxe class registry.
sys_thread__Thread_HxThread._hx_class = sys_thread__Thread_HxThread
_hx_classes["sys.thread._Thread.HxThread"] = sys_thread__Thread_HxThread
class thx_semver__Version_Version_Impl_:
    """Implementation class for the thx.semver Version abstract (SemVer)."""
    _hx_class_name = "thx.semver._Version.Version_Impl_"
    _hx_is_interface = "False"
    __slots__ = ()
    _hx_statics = ["VERSION", "stringToVersion", "equals", "greaterThan", "greaterThanOrEqual", "lessThan", "lessThanOrEqual", "parseIdentifiers", "parseIdentifier", "equalsIdentifiers", "greaterThanIdentifiers", "SANITIZER", "sanitize"]

    @staticmethod
    def stringToVersion(s):
        """Parse a SemVer string into an anon object with fields
        version=[major, minor, patch], pre, and build; raises when `s`
        does not match the VERSION regex."""
        _this = thx_semver__Version_Version_Impl_.VERSION
        _this.matchObj = python_lib_Re.search(_this.pattern,s)
        if (_this.matchObj is None):
            raise haxe_Exception.thrown((("Invalid SemVer format for \"" + ("null" if s is None else s)) + "\""))
        # Groups 1-3: numeric components; 4-5: pre-release / build identifiers.
        major = Std.parseInt(thx_semver__Version_Version_Impl_.VERSION.matchObj.group(1))
        minor = Std.parseInt(thx_semver__Version_Version_Impl_.VERSION.matchObj.group(2))
        patch = Std.parseInt(thx_semver__Version_Version_Impl_.VERSION.matchObj.group(3))
        pre = thx_semver__Version_Version_Impl_.parseIdentifiers(thx_semver__Version_Version_Impl_.VERSION.matchObj.group(4))
        build = thx_semver__Version_Version_Impl_.parseIdentifiers(thx_semver__Version_Version_Impl_.VERSION.matchObj.group(5))
        this1 = _hx_AnonObject({'version': [major, minor, patch], 'pre': pre, 'build': build})
        return this1

    @staticmethod
    def equals(this1,other):
        """SemVer equality: same major.minor.patch and equal pre-release
        identifiers (build metadata is not compared)."""
        if (((python_internal_ArrayImpl._get(this1.version, 0) != python_internal_ArrayImpl._get(other.version, 0)) or ((python_internal_ArrayImpl._get(this1.version, 1) != python_internal_ArrayImpl._get(other.version, 1)))) or ((python_internal_ArrayImpl._get(this1.version, 2) != python_internal_ArrayImpl._get(other.version, 2)))):
            return False
        return thx_semver__Version_Version_Impl_.equalsIdentifiers(this1.pre,other.pre)
@staticmethod
def greaterThan(this1,other):
if ((len(this1.pre) > 0) and ((len(other.pre) > 0))):
if (((python_internal_ArrayImpl._get(this1.version, 0) == python_internal_ArrayImpl._get(other.version, 0)) and ((python_internal_ArrayImpl._get(this1.version, 1) == python_internal_ArrayImpl._get(other.version, 1)))) and ((python_internal_ArrayImpl._get(this1.version, 2) == python_internal_ArrayImpl._get(other.version, 2)))):
return thx_semver__Version_Version_Impl_.greaterThanIdentifiers(this1.pre,other.pre)
else:
return False
elif (len(other.pre) > 0):
if (python_internal_ArrayImpl._get(this1.version, 0) != python_internal_ArrayImpl._get(other.version, 0)):
return (python_internal_ArrayImpl._get(this1.version, 0) > python_internal_ArrayImpl._get(other.version, 0))
if (python_internal_ArrayImpl._get(this1.version, 1) != python_internal_ArrayImpl._get(other.version, 1)):
return (python_internal_ArrayImpl._get(this1.version, 1) > python_internal_ArrayImpl._get(other.version, 1))
if (python_internal_ArrayImpl._get(this1.version, 2) != python_internal_ArrayImpl._get(other.version, 2)):
return (python_internal_ArrayImpl._get(this1.version, 2) > python_internal_ArrayImpl._get(other.version, 2))
if (len(this1.pre) > 0):
return thx_semver__Version_Version_Impl_.greaterThanIdentifiers(this1.pre,other.pre)
else:
return True
elif (len(this1.pre) <= 0):
if (python_internal_ArrayImpl._get(this1.version, 0) != python_internal_ArrayImpl._get(other.version, 0)):
return (python_internal_ArrayImpl._get(this1.version, 0) > python_internal_ArrayImpl._get(other.version, 0))
if (python_internal_ArrayImpl._get(this1.version, 1) != python_internal_ArrayImpl._get(other.version, 1)):
return (python_internal_ArrayImpl._get(this1.version, 1) > python_internal_ArrayImpl._get(other.version, 1))
if (python_internal_ArrayImpl._get(this1.version, 2) != python_internal_ArrayImpl._get(other.version, 2)):
return (python_internal_ArrayImpl._get(this1.version, 2) > python_internal_ArrayImpl._get(other.version, 2))
return thx_semver__Version_Version_Impl_.greaterThanIdentifiers(this1.pre,other.pre)
else:
return False
@staticmethod
def greaterThanOrEqual(this1,other):
if (not thx_semver__Version_Version_Impl_.equals(this1,other)):
return thx_semver__Version_Version_Impl_.greaterThan(this1,other)
else:
return True
@staticmethod
def lessThan(this1,other):
return (not thx_semver__Version_Version_Impl_.greaterThanOrEqual(this1,other))
@staticmethod
def lessThanOrEqual(this1,other):
return (not thx_semver__Version_Version_Impl_.greaterThan(this1,other))
@staticmethod
def parseIdentifiers(s):
_this = ("" if ((None == s)) else s)
def _hx_local_1():
def _hx_local_0(s):
return (s != "")
return list(map(thx_semver__Version_Version_Impl_.parseIdentifier,list(filter(_hx_local_0,list(map(thx_semver__Version_Version_Impl_.sanitize,_this.split(".")))))))
return _hx_local_1()
@staticmethod
def parseIdentifier(s):
i = Std.parseInt(s)
if (None == i):
return thx_semver_Identifier.StringId(s)
else:
return thx_semver_Identifier.IntId(i)
@staticmethod
def equalsIdentifiers(a,b):
if (len(a) != len(b)):
return False
_g = 0
_g1 = len(a)
while (_g < _g1):
i = _g
_g = (_g + 1)
_g2 = (a[i] if i >= 0 and i < len(a) else None)
_g3 = (b[i] if i >= 0 and i < len(b) else None)
tmp = _g2.index
if (tmp == 0):
if (_g3.index == 0):
b1 = _g3.params[0]
a1 = _g2.params[0]
if (a1 != b1):
return False
elif (tmp == 1):
if (_g3.index == 1):
b2 = _g3.params[0]
a2 = _g2.params[0]
if (a2 != b2):
return False
else:
pass
return True
@staticmethod
def greaterThanIdentifiers(a,b):
_g = 0
_g1 = len(a)
while (_g < _g1):
i = _g
_g = (_g + 1)
_g2 = (a[i] if i >= 0 and i < len(a) else None)
_g3 = (b[i] if i >= 0 and i < len(b) else None)
tmp = _g2.index
if (tmp == 0):
_g4 = _g2.params[0]
tmp1 = _g3.index
if (tmp1 == 0):
_g5 = _g3.params[0]
b1 = _g5
a1 = _g4
if (a1 == b1):
continue
else:
b2 = _g5
a2 = _g4
if (a2 > b2):
return True
else:
return False
elif (tmp1 == 1):
_g6 = _g3.params[0]
return True
else:
return False
elif (tmp == 1):
_g7 = _g2.params[0]
if (_g3.index == 1):
_g8 = _g3.params[0]
b3 = _g8
a3 = _g7
if (a3 == b3):
continue
else:
b4 = _g8
a4 = _g7
if (a4 > b4):
return True
else:
return False
else:
return False
else:
return False
return False
@staticmethod
def sanitize(s):
return thx_semver__Version_Version_Impl_.SANITIZER.replace(s,"")
# Register Version_Impl_ in the Haxe runtime class table.
thx_semver__Version_Version_Impl_._hx_class = thx_semver__Version_Version_Impl_
_hx_classes["thx.semver._Version.Version_Impl_"] = thx_semver__Version_Version_Impl_
class thx_semver_Identifier(Enum):
    # Haxe ADT for one dot-separated SemVer identifier; `Enum` here is the
    # Haxe runtime base class (tag name, tag index, params tuple), not
    # Python's enum.Enum.
    __slots__ = ()
    _hx_class_name = "thx.semver.Identifier"
    _hx_constructs = ["StringId", "IntId"]
    @staticmethod
    def StringId(value):
        # Constructor tag 0: alphanumeric identifier.
        return thx_semver_Identifier("StringId", 0, (value,))
    @staticmethod
    def IntId(value):
        # Constructor tag 1: numeric identifier.
        return thx_semver_Identifier("IntId", 1, (value,))
# Register the Identifier enum in the Haxe runtime class table.
thx_semver_Identifier._hx_class = thx_semver_Identifier
_hx_classes["thx.semver.Identifier"] = thx_semver_Identifier
# --- Haxe static-field initialization (runs once at module import) ---
# Math constants backed by native Python floats.
Math.NEGATIVE_INFINITY = float("-inf")
Math.POSITIVE_INFINITY = float("inf")
Math.NaN = float("nan")
Math.PI = python_lib_Math.pi
# Thread registry: the importing thread becomes the Haxe main thread and
# gets an event loop attached immediately.
sys_thread__Thread_HxThread.threads = haxe_ds_ObjectMap()
sys_thread__Thread_HxThread.threadsMutex = sys_thread_Mutex()
sys_thread__Thread_HxThread.mainThread = sys_thread__Thread_HxThread(threading.current_thread())
sys_thread__Thread_HxThread.mainThread.events = sys_thread_EventLoop()
# Named locks used throughout the Apptimize SDK.
apptimize_util_ABTDataLock.SYSTEM_DATA_LOCK = apptimize_util_ABTDataLock.getNewLock("system_data_lock")
apptimize_util_ABTDataLock.METADATA_LOCK = apptimize_util_ABTDataLock.getNewLock("meta_data_lock")
apptimize_util_ABTDataLock.CHECK_TIME_LOCK = apptimize_util_ABTDataLock.getNewLock("last_check_time_lock")
apptimize_util_ABTDataLock.INITIALIZATION = apptimize_util_ABTDataLock.getNewLock("initialize_lock")
apptimize_ABTDataStore.resultsLock = apptimize_util_ABTDataLock.getNewLock("datastore_results_lock")
# Log-level constants (verbose .. none) and logger defaults.
apptimize_ABTLogger.LOG_LEVEL_VERBOSE = 0
apptimize_ABTLogger.LOG_LEVEL_DEBUG = 1
apptimize_ABTLogger.LOG_LEVEL_INFO = 2
apptimize_ABTLogger.LOG_LEVEL_WARN = 3
apptimize_ABTLogger.LOG_LEVEL_ERROR = 4
apptimize_ABTLogger.LOG_LEVEL_NONE = 5
apptimize_ABTLogger.logLevel = apptimize_ABTLogger.LOG_LEVEL_VERBOSE
apptimize_ABTLogger.useTraceForLogging = False
apptimize_ApptimizeInternal.kABTEventSourceApptimize = "a"
apptimize_ApptimizeInternal.kABTValueEventKey = "value"
apptimize_ApptimizeInternal._state = 0
# Results-post retry/backoff state.
apptimize_api_ABTApiResultsPost.MAX_FAILURE_DELAY_MS = 60000
apptimize_api_ABTApiResultsPost.DEFAULT_FAILURE_DELAY_MS = 1000
def _hx_init_apptimize_api_ABTApiResultsPost__failureDelayMs():
    # Wrap the default delay in an atomic int (None guarded to 0).
    def _hx_local_0():
        val = apptimize_api_ABTApiResultsPost.DEFAULT_FAILURE_DELAY_MS
        if (val is None):
            val = 0
        this1 = hx_concurrent_atomic__AtomicInt_AtomicIntImpl(val)
        return this1
    return _hx_local_0()
apptimize_api_ABTApiResultsPost._failureDelayMs = _hx_init_apptimize_api_ABTApiResultsPost__failureDelayMs()
apptimize_api_ABTApiResultsPost._pendingMap = haxe_ds_StringMap()
apptimize_api_ABTApiResultsPost._pendingResults = hx_concurrent_collection__SynchronizedLinkedList_SynchronizedLinkedList_Impl_._new()
apptimize_api_ABTApiResultsPost._postDispatch = apptimize_util_ABTDispatch("Results Post Dispatch Queue")
apptimize_api_ABTApiResultsPost._loadedPending = False
apptimize_api_ABTApiResultsPost.PENDING_LOCK = apptimize_util_ABTDataLock.getNewLock("pending_results_key")
# JSON key names for the filter-evaluation subsystem.
apptimize_filter_ABTFilter.kABTFilterKeyValue = "value"
apptimize_filter_ABTFilter.kABTFilterKeyType = "type"
apptimize_filter_ABTFilter.kABTFilterKeyProperty = "property"
apptimize_filter_ABTFilter.kABTFilterKeyOperator = "operator"
apptimize_filter_ABTFilter.kABTFilterKeyPropertySource = "propertySource"
apptimize_filter_ABTFilter.kABTFilterKeyCallServerInputs = "callServerInputs"
apptimize_filter_ABTFilter.kABTFilterKeyCallURLKey = "callServerUrlKey"
apptimize_filter_ABTFilter.kABTFilterKeyUserAttribute = "userAttribute"
apptimize_filter_ABTFilter.kABTFilterKeyPrefixedAttribute = "prefixedAttribute"
apptimize_filter_ABTFilter.kABTFilterKeyNamedFilter = "namedFilter"
apptimize_filter_ABTFilterUtils.__meta__ = _hx_AnonObject({'statics': _hx_AnonObject({'ABTEvaluateString': _hx_AnonObject({'static': None}), 'ABTEvaluateBool': _hx_AnonObject({'static': None}), 'ABTEvaluateNumber': _hx_AnonObject({'static': None})})})
apptimize_filter_ABTNamedFilter.kABTNamedFilterKeyFilterName = "filterName"
apptimize_filter_ABTNamedFilter.kABTNamedFilterKeyTrueIsSticky = "trueIsSticky"
apptimize_filter_ABTNamedFilter.kABTNamedFilterKeyFalseIsSticky = "falseIsSticky"
apptimize_filter_ABTNamedFilter.kABTNamedFilterKeyNullIsSticky = "nullIsSticky"
apptimize_models_results_ABTResultEntry.RESULT_ENTRY_CREATION_LOCK = apptimize_util_ABTDataLock.getNewLock("result_entry_creation_lock_key")
# Persistence latency classes and storage key names.
apptimize_support_persistence_ABTPersistence.LOW_LATENCY = 0
apptimize_support_persistence_ABTPersistence.HIGH_LATENCY = 1
apptimize_support_persistence_ABTPersistence.ALL_LATENCY = 2
apptimize_support_persistence_ABTPersistence.kMetadataKey = "METADATA_KEY"
apptimize_support_persistence_ABTPersistence.kUserIDKey = "USER_ID_KEY"
apptimize_support_persistence_ABTPersistence.kAnonymousGuidKey = "ANONYMOUS_GUID_KEY"
apptimize_support_persistence_ABTPersistence.kCustomPropertiesKey = "CUSTOM_PROPERTIES_KEY"
apptimize_support_persistence_ABTPersistence.kInternalPropertiesKey = "INTERNAL_PROPERTIES_KEY"
apptimize_support_persistence_ABTPersistence.kResultLogsKey = "RESULT_LOGS_KEY"
apptimize_support_persistence_ABTPersistence.kResultPostsKey = "RESULT_POSTS_KEY"
apptimize_support_persistence_ABTPersistence.kResultPostsListKey = "RESULT_POSTS_LIST_KEY"
apptimize_support_persistence_ABTPersistence.kResultEntrySequenceKey = "RESULT_ENTRY_SEQUENCE_KEY"
apptimize_support_persistence_ABTPersistence.kResultEntryTimestampKey = "RESULT_ENTRY_TIMESTAMP_KEY"
apptimize_support_persistence_ABTPersistence.kApptimizeVersionKey = "APPTIMIZE_VERSION_KEY"
apptimize_support_persistence_ABTPersistence.kLockAccessKey = "LOCK_ACCESS_KEY"
apptimize_support_persistence_ABTPersistence.kPostManagementKey = "POST_MANAGEMENT_KEY"
apptimize_support_persistence_ABTPersistence.kResultLastSubmitTimeKey = "RESULT_LAST_SUBMIT_TIME_KEY"
apptimize_support_persistence_ABTPersistence.kMetadataLastCheckTimeKey = "METADATA_LAST_CHECK_TIME_KEY"
apptimize_support_persistence_ABTPersistence.kDisabledVersions = "DISABLED_VERSIONS_KEY"
apptimize_support_persistence_ABTPersistence._isFlushing = False
apptimize_support_properties_ABTApplicationProperties._sigilForApplicationNamespace = "$"
# Configuration property key names.
apptimize_support_properties_ABTConfigProperties.META_DATA_URL_KEY = "meta_data_url"
apptimize_support_properties_ABTConfigProperties.META_DATA_URL_LL_KEY = "meta_data_ll_url"
apptimize_support_properties_ABTConfigProperties.META_DATA_URL_HL_KEY = "meta_data_hl_url"
apptimize_support_properties_ABTConfigProperties.LOG_LEVEL_KEY = "log_level"
apptimize_support_properties_ABTConfigProperties.FOREGROUND_PERIOD_MS_KEY = "foreground_period_ms"
apptimize_support_properties_ABTConfigProperties.RESULT_POST_DELAY_MS_KEY = "result_post_delay_ms"
apptimize_support_properties_ABTConfigProperties.THREADING_ENABLED_KEY = "threading_enabled"
apptimize_support_properties_ABTConfigProperties.RESULT_POST_THREAD_POOL_SIZE_KEY = "result_post_thread_pool_size"
apptimize_support_properties_ABTConfigProperties.ALTERATION_CACHE_SIZE_KEY = "alteration_cache_size"
apptimize_support_properties_ABTConfigProperties.RESULTS_CACHE_SIZE_KEY = "results_cache_size"
apptimize_support_properties_ABTConfigProperties.MAXIMUM_RESULT_ENTRIES_KEY = "maximum_result_entries"
apptimize_support_properties_ABTConfigProperties.MAXIMUM_PENDING_RESULTS_KEY = "maximum_pending_results"
apptimize_support_properties_ABTConfigProperties.METADATA_POLLING_INTERVAL_MS_KEY = "metadata_polling_interval_ms"
apptimize_support_properties_ABTConfigProperties.METADATA_POLLING_BACKGROUND_INTERVAL_MS_KEY = "metadata_polling_background_interval_ms"
apptimize_support_properties_ABTConfigProperties.EXCEPTIONS_ENABLED_KEY = "exceptions_enabled"
apptimize_support_properties_ABTConfigProperties.MAXIMUM_RESULT_POST_FAILURE_KEY = "maximum_result_failures"
apptimize_support_properties_ABTConfigProperties.MAXIMUM_RESULT_POST_SENDER_TIMEOUT_MS_KEY = "maximum_result_post_sender_timeout_ms"
apptimize_support_properties_ABTConfigProperties.STORAGE_TYPE_KEY = "storage_type"
apptimize_support_properties_ABTConfigProperties.AUTOMATIC_SHUTDOWN_HOOK = "automatic_shutdown_hook"
apptimize_support_properties_ABTConfigProperties.APPTIMIZE_ENVIRONMENT_KEY = "apptimize_environment"
apptimize_support_properties_ABTConfigProperties.APPTIMIZE_REGION_KEY = "apptimize_region"
apptimize_support_properties_ABTConfigProperties.COMPRESS_PERSISTENCE_STORE_KEY = "compress_persistence_store"
apptimize_support_properties_ABTConfigProperties.GROUPS_BASE_URL_KEY = "groups_base_url"
apptimize_support_properties_ABTConfigProperties.REACT_NATIVE_STORAGE_KEY = "react_native_storage"
apptimize_support_properties_ABTConfigProperties.LOCAL_DISK_STORAGE_PATH_KEY = "local_disk_storage_path"
# Haxe serializer/unserializer defaults.
haxe_Serializer.USE_CACHE = False
haxe_Serializer.USE_ENUM_INDEX = False
haxe_Serializer.BASE64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789%:"
haxe_Serializer.BASE64_CODES = None
haxe_Unserializer.DEFAULT_RESOLVER = haxe__Unserializer_DefaultResolver()
haxe_Unserializer.BASE64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789%:"
haxe_Unserializer.CODES = None
def _hx_init_hx_concurrent_ServiceBase__ids():
    # Atomic counter used to assign service ids.
    def _hx_local_0():
        this1 = hx_concurrent_atomic__AtomicInt_AtomicIntImpl(0)
        return this1
    return _hx_local_0()
hx_concurrent_ServiceBase._ids = _hx_init_hx_concurrent_ServiceBase__ids()
hx_concurrent_executor_Executor.NOW_ONCE = hx_concurrent_executor_Schedule.ONCE(0)
hx_concurrent_thread_ThreadPool.DEFAULT_POLL_PERIOD = 0.001
def _hx_init_hx_concurrent_thread_ThreadPool__threadIDs():
    # Atomic counter used to assign pool-thread ids.
    def _hx_local_0():
        this1 = hx_concurrent_atomic__AtomicInt_AtomicIntImpl(0)
        return this1
    return _hx_local_0()
hx_concurrent_thread_ThreadPool._threadIDs = _hx_init_hx_concurrent_thread_ThreadPool__threadIDs()
# pako (zlib port) static tables and constants.
pako_Inflate.DEFAULT_OPTIONS = _hx_AnonObject({'chunkSize': 16384, 'windowBits': 0, 'raw': False, 'dictionary': None})
pako_zlib_CRC32.crcTable = pako_zlib_CRC32.makeTable()
pako_zlib_InfTrees.MAXBITS = 15
pako_zlib_InfTrees.ENOUGH_LENS = 852
pako_zlib_InfTrees.ENOUGH_DISTS = 592
pako_zlib_InfTrees.CODES = 0
pako_zlib_InfTrees.LENS = 1
pako_zlib_InfTrees.DISTS = 2
pako_zlib_InfTrees.lbase = haxe_io__UInt16Array_UInt16Array_Impl_.fromArray([3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0])
pako_zlib_InfTrees.lext = haxe_io__UInt16Array_UInt16Array_Impl_.fromArray([16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78])
pako_zlib_InfTrees.dbase = haxe_io__UInt16Array_UInt16Array_Impl_.fromArray([1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577, 0, 0])
pako_zlib_InfTrees.dext = haxe_io__UInt16Array_UInt16Array_Impl_.fromArray([16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, 28, 28, 29, 29, 64, 64])
pako_zlib_Inflate.virgin = True
pako_zlib_Inflate.lenfix = None
pako_zlib_Inflate.distfix = None
def _hx_init_pako_zlib_Messages_map():
    # zlib return-code -> message table.
    def _hx_local_0():
        _g = haxe_ds_IntMap()
        _g.set(2,"need dictionary")
        _g.set(1,"stream end")
        _g.set(0,"")
        _g.set(-1,"file error")
        _g.set(-2,"stream error")
        _g.set(-3,"data error")
        _g.set(-4,"insufficient memory")
        _g.set(-5,"buffer error")
        _g.set(-6,"incompatible version")
        return _g
    return _hx_local_0()
pako_zlib_Messages.map = _hx_init_pako_zlib_Messages_map()
# Python keywords the Haxe runtime must avoid when emitting field names.
python_Boot.keywords = set(["and", "del", "from", "not", "with", "as", "elif", "global", "or", "yield", "assert", "else", "if", "pass", "None", "break", "except", "import", "raise", "True", "class", "exec", "in", "return", "False", "continue", "finally", "is", "try", "def", "for", "lambda", "while"])
python_Boot.prefixLength = len("_hx_")
python_Lib.lineEnd = ("\r\n" if ((Sys.systemName() == "Windows")) else "\n")
# SemVer regexes: full-version matcher and identifier sanitizer.
thx_semver__Version_Version_Impl_.VERSION = EReg("^(\\d+)\\.(\\d+)\\.(\\d+)(?:[-]([a-z0-9.-]+))?(?:[+]([a-z0-9.-]+))?$","i")
thx_semver__Version_Version_Impl_.SANITIZER = EReg("[^0-9A-Za-z-]","g")
|
Apptimize
|
/Apptimize-1.2.39.tar.gz/Apptimize-1.2.39/apptimize.py
|
apptimize.py
|
# UNDER CONSTRUCTION!
# aptly_api_cli
### Why do we need another aptly cli interface?
- Because aptly-api-cli has many more features built in.
- aptly-api-cli is made accessible to the Python community.
# Description
This python command line interface, executes calls to the Aptly server remotely, without blocking the Aptly database.
All functionality from http://www.aptly.info/doc/api/ is extended with even more useful features, such as cleaning out the last N
snapshots or packages.
You can make either use of the aptly_api_request.py as a starting point for your own application or just use the cli (aptly_api_cli.py)
bundled with this repository to execute your requests via command line.
# Installation
# Get started
# Command Line Options
## Help
Show this help message and exit
```
-h, --help
```
## Local Repos API
Local repositories management via REST API.
#### List
List all local repos
```
python aptly_api_cli.py --repo_list
```
#### Create
Create empty local repository with specified parameters. REPO_NAME is the name of the repository to create. COMMENT, DISTRIBUTION (e.g.: precise) and COMPONENT (e.g.: main) are optional.
```
python aptly_api_cli.py --repo_create=REPO_NAME [COMMENT] [DISTRIBUTION] [COMPONENT]
```
#### Show
Show basic information about a local repository. REPO_NAME is the name of the repository.
```
python aptly_api_cli.py --repo_show=REPO_NAME
```
#### Show Package
Show all packages of a local repository. REPO_NAME is the name of the repository. PACKAGE_TO_SEARCH (Name of the Package to search for), WITH_DEPS (e.g.: 0 or 1), FORMAT (e.g.: compact or detail) are optional. Please see http://www.aptly.info/doc/api/ for more details.
```
python aptly_api_cli.py --repo_show_packages=REPO_NAME [PACKAGE_TO_SEARCH] [WITH_DEPS] [FORMAT]
```
#### Edit
Edit information of a local repository.
```
python aptly_api_cli.py --repo_edit=REPO_NAME COMMENT DISTRIBUTION COMPONENT
```
#### Delete
Delete repository.
```
python aptly_api_cli.py --repo_delete=REPO_NAME
```
#### Add Packages
Add packages to local repo by key
```
python aptly_api_cli.py --repo_add_packages_by_key=REPO_NAME PACKAGE_REFS
```
#### Delete Packages
Delete packages from repository by key
```
python aptly_api_cli.py --repo_delete_packages_by_key=REPO_NAME PACKAGE_REFS
```
## File Upload API
Upload package files temporarily to aptly service. These files could be added to local repositories using local repositories API.
All uploaded files are stored under <rootDir>/upload directory (see configuration). This directory would be created automatically if it doesn’t exist.
Uploaded files are grouped by directories to support concurrent uploads from multiple package sources. Local repos add API can operate on directory (adding all files from directory) or on individual package files. By default, all successfully added package files would be removed.
#### List Directories
Lists all upload-directories.
```
python aptly_api_cli.py --file_list_dirs
```
#### Upload files
Upload file to local upload-directory
```
python aptly_api_cli.py --file_upload=UPLOAD_DIR FILE
```
#### Add Package
Add package from upload folder to local repo
```
python aptly_api_cli.py --repo_add_package_from_upload=REPO_NAME UPLOAD_DIR PACKAGE_NAME
```
#### List files
List uploaded files
```
python aptly_api_cli.py --file_list
```
#### Delete directory
Delete upload directory
```
python aptly_api_cli.py --file_delete_dir=UPLOAD_DIR
```
#### Delete file
Delete a file in upload directory
```
python aptly_api_cli.py --file_delete=UPLOAD_DIR FILE
```
## Snapshot API
Snapshot management APIs.
A snapshot is an immutable package reference list taken from a local repository, a mirror, or the result of other snapshot processing.
#### Create snapshot from local repo
Create snapshot from local repo by giving the snapshot and repo name as parameter. A description is optional.
```
python aptly_api_cli.py --snapshot_create_from_local_repo=SNAPSHOT_NAME REPO_NAME [DESCRIPTION]
```
#### Create snapshot by package references
Create snapshot by package references. The snapshot name, a comma separated list of snapshots and package references should be given as parameter. A description is optional.
```
python aptly_api_cli.py --snapshot_create_by_pack_refs=SNAPSHOT_NAME SOURCE_SNAPSHOTS PACKAGE_REF_LIST [DESCRIPTION]
```
#### Snapshot show
Show basic information about snapshot
```
python aptly_api_cli.py --snapshot_show=SNAPSHOT_NAME
```
#### Snapshot show packages
Show all packages the snapshot is containing or optionally search for one.
```
python aptly_api_cli.py --snapshot_show_packages=SNAPSHOT_NAME [PACKAGE_TO_SEARCH] [WITH_DEPS] [FORMAT]
```
#### Update snapshot
Rename snapshot and optionally change description
```
python aptly_api_cli.py --snapshot_update=OLD_SNAPSHOT_NAME NEW_SNAPSHOT_NAME [DESCRIPTION]
```
#### Snapshot list
Lists all available snapshots
```
python aptly_api_cli.py --snapshot_list
```
#### Snapshot diff
List differences of two snapshots
```
python aptly_api_cli.py --snapshot_diff=LEFT_SNAPSHOT_NAME RIGHT_SNAPSHOT_NAME
```
#### Snapshot delete
Delete snapshot by name. Optionally force deletion.
```
python aptly_api_cli.py --snapshot_delete=SNAPSHOT_NAME [FORCE_DELETION]
```
## Publish API
Manages published repositories.
#### Publish list
List all available repositories to publish to
```
python aptly_api_cli.py --publish_list
```
#### Publish
Publish snapshot or repository to storage
```
python aptly_api_cli.py --publish=PREFIX SOURCES_KIND SOURCES_LIST DISTRIBUTION_LIST [COMPONENT] [LABEL] [ORIGIN] [FORCE_OVERWRITE] [ARCHITECTURES_LIST]
```
#### Publish drop
Drop published repo content
```
python aptly_api_cli.py --publish_drop=PREFIX DISTRIBUTION [FORCE_REMOVAL]
```
#### Publish switch
Switch the snapshots backing a published repo with minimal server downtime.
```
python aptly_api_cli.py --publish_switch=PREFIX SOURCES_LIST DISTRIBUTION [COMPONENT] [FORCE_OVERWRITE]
```
## Misc API
#### Returns aptly version
```
python aptly_api_cli.py --get_version
```
## Package API
APIs related to packages on their own.
#### Package show
Show packages by key
```
python aptly_api_cli.py --package_show_by_key=PACKAGE_KEY
```
|
Aptly-Api-Cli
|
/Aptly-Api-Cli-0.1.tar.gz/Aptly-Api-Cli-0.1/README.md
|
README.md
|
import os
import sys
import json
import requests
#from requests.exceptions import RequestException
class AptlyApiRequests(object):
"""
Instances of this class will be able to talk
to the Aptly REST API remotely.
"""
def __init__(self):
    """
    Initialize the client against the hard-coded Aptly endpoint
    (http://localhost:9003) and build the REST route table.
    NOTE(review): the old docstring said url and port are passed in,
    but the constructor takes no arguments -- the endpoint is fixed here.
    """
    basic_url = 'http://localhost'
    port = ':9003'
    url = basic_url + port
    # self values
    self.cfg = {
        # Routes (each ends with a trailing '/')
        'route_snap': url + '/api/snapshots/',
        'route_repo': url + '/api/repos/',
        'route_file': url + '/api/files/',
        'route_pack': url + '/api/packages/',
        'route_pub': url + '/api/publish/',
        'route_graph': url + '/api/graph/',
        'route_vers': url + '/api/version/',
        # Number of packages to have left
        'save_last_pkg': 10,
        # Number of snapshots to have left
        'save_last_snap': 3
    }
    # All requests are sent/received as JSON.
    self.headers = {'content-type': 'application/json'}
def _wrap_and_join(self, x):
    """Join the items of x into one string, each item double-quoted and comma-separated."""
    joined = '", "'.join(x)
    return '"' + joined + '"'
def _out(self, x):
    # Print each item of x on its own line (Python 2 print statement --
    # this file targets Python 2).
    for y in x:
        print y
###################
# LOCAL REPOS API #
###################
def repo_create(self, repo_name, data=None):
    """
    POST /api/repos
    Create empty local repository with specified parameters ( see also aptly repo create).
    JSON body params:
    Name: required, [string] - local repository name
    Comment: [string] - text describing local repository, for the user
    DefaultDistribution: [string] - default distribution when publishing from this local repo
    DefaultComponent: [string] - default component when publishing from this local repo
    HTTP Errors:
    Code Description
    400 repository with such name already exists
    curl -X POST -H 'Content-Type: application/json' --data '{"Name": "aptly-repo"}' http://localhost:8080/api/repos
    """
    if data is None:
        post_data = {
            'Name': repo_name
        }
    else:
        # NOTE(review): assumes `data` exposes .comment, .default_distribution
        # and .default_component attributes -- confirm the caller's type.
        post_data = {
            'Name': repo_name,
            'Comment': data.comment,
            'DefaultDistribution': data.default_distribution,
            'DefaultComponent': data.default_component
        }
    # [:-1] drops the trailing '/' so the POST goes to /api/repos.
    r = requests.post(self.cfg['route_repo'][:-1],
                      data=json.dumps(post_data),
                      headers=self.headers)
    # r.raise_for_status()
    resp_data = json.loads(r.content)
    print resp_data
    return resp_data
def repo_show(self, repo_name):
"""
SHOW
GET /api/repos/:name
Returns basic information about local repository.
HTTP Errors:
Code Description
404 repository with such name doesn’t exist
Response:
Name: [string] local repository name
Comment: [string] text describing local repository, for the user
DefaultDistribution: [string] default distribution when publishing from this local repo
DefaultComponent: [string] default component when publishing from this local repo
Example:
$ curl http://localhost:8080/api/repos/aptly-repo
"""
r = requests.get(
self.cfg['route_repo'] + repo_name, headers=self.headers)
# r.raise_for_status()
resp_data = json.loads(r.content)
print resp_data
return resp_data
def repo_show_packages(self, repo_name, package_to_search=None, withDeps = 0, format='compact'):
    """
    SHOW PACKAGES/SEARCH
    GET /api/repos/:name/packages
    List all packages in local repository or perform search on repository contents and return result.
    Query params:
    q - package query, if missing - return all packages
    withDeps - set to 1 to include dependencies when evaluating package query
    format - result format, compact by default ( self, only package keys), details to return full information about each package ( self, might be slow on large repos)
    Example:
    $ curl http://localhost:8080/api/repos/aptly-repo/packages
    """
    # NOTE(review): `format` shadows the builtin, but it is a caller-visible
    # keyword parameter name, so it must stay.
    if package_to_search is None:
        param = {
            'withDeps': withDeps,
            'format': format
        }
    else:
        param = {
            'q': package_to_search,
            'withDeps': withDeps,
            'format': format
        }
    url = self.cfg['route_repo'] + repo_name + '/packages'
    r = requests.get( url, params=param, headers=self.headers)
    # raise_for_status()
    resp_data = json.loads(r.content)
    print json.dumps(resp_data)
    return resp_data
def repo_edit(self, repo_name, data = None):
    """
    EDIT
    PUT /api/repos/:name
    Update local repository meta information.
    JSON body params:
    Comment: [string] text describing local repository, for the user
    DefaultDistribution: [string] default distribution when publishing from this local repo
    DefaultComponent: [string] default component when publishing from this local repo
    HTTP Errors:
    Code Description
    404 repository with such name doesn’t exist
    Response is the same as for GET /api/repos/:name API.
    Example:
    $ curl -X PUT -H 'Content-Type: application/json' --data '{"DefaultDistribution": "trusty"}' http://localhost:8080/api/repos/local1
    """
    # `data` is rebound: None -> empty body; otherwise its attributes are
    # flattened into the JSON payload.
    # NOTE(review): assumes `data` exposes .comment, .default_distribution
    # and .default_component -- confirm the caller's type.
    if data is None:
        data = {}
    else:
        data = {
            'Comment': data.comment,
            'DefaultDistribution': data.default_distribution,
            'DefaultComponent': data.default_component
        }
    r = requests.put(self.cfg['route_repo'] + repo_name,
                     data=json.dumps(data),
                     headers=self.headers)
    # r.raise_for_status()
    resp_data = json.loads(r.content)
    print resp_data
    return resp_data
def repo_list(self):
"""
LIST
GET /api/repos
Show list of currently available local repositories. Each repository is returned as in “show” API.
Example:
$ curl http://localhost:8080/api/repos
"""
r = requests.get(self.cfg['route_repo'], headers=self.headers)
# r.raise_for_status()
resp_data = json.loads(r.content)
print json.dumps(resp_data)
return resp_data
def repo_delete(self, repo_name):
    """
    DELETE
    DELETE /api/repos/:name
    Delete local repository.
    Local repository can’t be deleted if it is published. If local repository has snapshots, aptly would refuse to delete it by default, but that can be overridden with force flag.
    Query params:
    force when value is set to 1, delete local repository even if it has snapshots
    HTTP Errors:
    Code Description
    404 repository with such name doesn’t exist
    409 repository can’t be dropped ( self, reason in the message)
    """
    # NOTE(review): the documented `force` query param is never sent here.
    r = requests.delete(self.cfg['route_repo'] + repo_name,
                        headers=self.headers)
    # r.raise_for_status()
    resp_data = json.loads(r.content)
    print json.dumps(resp_data)
    return resp_data
def repo_add_package_from_upload(self, repo_name, dir_name, file_name = None, params = None):
"""
ADD PACKAGES FROM UPLOADED FILE/DIRECTORY
POST /api/repos/:name/file/:dir
POST /api/repos/:name/file/:dir/:file
Import packages from files ( uploaded using File Upload API) to the local repository. If directory specified, aptly would discover package files automatically.
Adding same package to local repository is not an error.
By default aptly would try to remove every successfully processed file and directory :dir ( if it becomes empty after import).
Query params:
noRemove - when value is set to 1, don’t remove any files
forceReplace - when value is set to 1, remove packages conflicting with package being added ( in local repository)
HTTP Errors:
404 repository with such name doesn’t exist
Response:
FailedFiles [][string] list of files that failed to be processed
Report object operation report ( self, see below)
Report structure:
Warnings - [][string] list of warnings
Added -[][string] list of messages related to packages being added
Example ( file upload, add package to repo):
$ curl -X POST -F file=@aptly_0.9~dev+217+ge5d646c_i386.deb http://localhost:8080/api/files/aptly-0.9
"""
if file_name is None:
url = self.cfg['route_repo'] + repo_name + '/file/' + dir_name
else:
url = self.cfg['route_repo'] + repo_name + '/file/' + dir_name + '/' + file_name
if params is not None:
query_param = {
'noRemove': param.no_remove,
'forceReplace': param.force_replace
}
else:
query_param = {
'noRemove': 0,
'forceReplace': 0
}
r = requests.post(url,
params=query_param,
headers=self.headers)
# r.raise_for_status()
resp_data = json.loads(r.content)
print resp_data
return resp_data
def repo_add_packages_by_key(self, repo_name, package_key_list):
"""
ADD PACKAGES BY KEY
POST /api/repos/:name/packages
Add packages to local repository by package keys.
Any package could be added, it should be part of aptly database ( it could come from any mirror, snapshot, other local repository). This API combined with package list ( search) APIs allows to implement importing, copying, moving packages around.
API verifies that packages actually exist in aptly database and checks constraint that conflicting packages can’t be part of the same local repository.
JSON body params:
PackageRefs [][string] list of package references ( package keys)
HTTP Errors:
Code Description
400 added package conflicts with already exists in repository
404 repository with such name doesn’t exist
404 package with specified key doesn’t exist
Response is the same as for GET /api/repos/:name API.
Example
$ curl -X POST -H 'Content-Type: application/json' --data '{"PackageRefs": ["Psource pyspi 0.6.1-1.4 f8f1daa806004e89","Pi386 libboost-program-options-dev 1.49.0.1 918d2f433384e378"]}' http://localhost:8080/api/repos/repo2/packages
"""
if len(package_key_list) <= 0:
print 'No packages were given... aborting'
return
url = self.cfg['route_repo'] + repo_name + '/packages'
param = {
'PackageRefs': package_key_list
}
r = requests.post(url, data=json.dumps(param), headers=self.headers)
resp_data = json.loads(r.content)
print resp_data
return resp_data
def repo_delete_packages_by_key(self, repo_name, package_key_list):
"""
DELETE PACKAGES BY KEY
DELETE /api/repos/:name/packages
Remove packages from local repository by package keys.
Any package could be removed from local repository. List package references in local repository could be retrieved with GET /repos/:name/packages.
JSON body params:
PackageRefs [][string] list of package references ( package keys)
HTTP Errors:
404 repository with such name doesn’t exist
Response is the same as for GET /api/repos/:name API.
Example:
$ curl -X DELETE -H 'Content-Type: application/json' --data '{"PackageRefs": ["Pi386 libboost-program-options-dev 1.49.0.1 918d2f433384e378"]}' http://localhost:8080/api/repos/repo2/packages
"""
url = self.cfg['route_repo'] + repo_name + '/packages'
data = {
'PackageRefs': package_key_list
}
r = requests.delete(url, data=json.dumps(data), headers=self.headers)
resp_data = json.loads(r.content)
print resp_data
return resp_data
###################
# FILE UPLOAD API #
###################
def file_list_directories(self):
"""
LIST DIRECTORIES
GET /api/files
List all directories.
Response: list of directory names.
Example:
$ curl http://localhost:8080/api/files
"""
r = requests.get(self.cfg['route_file'] , headers=self.headers)
# r.raise_for_status()
resp_data = json.loads(r.content)
print json.dumps(resp_data)
def file_upload(self, dir_name, file):
"""
UPLOAD FILE
POST /api/files/:dir
Parameter :dir is upload directory name. Directory would be created if it doesn’t exist.
Any number of files can be uploaded in one call, aptly would preserve filenames. No check is performed if existing uploaded would be overwritten.
Response: list of uploaded files as :dir/:file.
Example:
$ curl -X POST -F file=@aptly_0.9~dev+217+ge5d646c_i386.deb http://localhost:8080/api/files/aptly-0.9
"""
f = {
'file': open(file,'rb')
}
r = requests.post(self.cfg['route_file'] + dir_name,
files=f)
# r.raise_for_status()
resp_data = json.loads(r.content)
print resp_data
return resp_data
def file_list(self, dir_name = None):
"""
LIST FILES IN DIRECTORY
GET /api/files/:dir
Returns list of files in directory.
Response: list of filenames.
HTTP Errors:
404 - directory doesn’t exist
Example:
$ curl http://localhost:8080/api/files/aptly-0.9
"""
if dir_name is None:
dir_name = ''
r = requests.get(self.cfg['route_file'] + dir_name , headers=self.headers)
# r.raise_for_status()
resp_data = json.loads(r.content)
print json.dumps(resp_data)
return resp_data
def file_delete_directory(self, dir_name):
"""
DELETE DIRECTORY
DELETE /api/files/:dir
Deletes all files in upload directory and directory itself.
Example:
$ curl -X DELETE http://localhost:8080/api/files/aptly-0.9
"""
r = requests.delete(self.cfg['route_file'] + dir_name, headers=self.headers)
# r.raise_for_status()
resp_data = json.loads(r.content)
print json.dumps(resp_data)
return resp_data
def file_delete(self, dir_name, file_name):
"""
DELETE FILE IN DIRECTORY
DELETE /api/files/:dir/:file
Delete single file in directory.
Example:
$ curl -X DELETE http://localhost:8080/api/files/aptly-0.9/aptly_0.9~dev+217+ge5d646c_i386.deb
"""
r = requests.delete(self.cfg['route_file'] + dir_name + '/' + file_name, headers=self.headers)
# r.raise_for_status()
resp_data = json.loads(r.content)
print json.dumps(resp_data)
return resp_data
################
# SNAPSHOT API #
################
def snapshot_list(self, sort = 'time'):
"""
LIST
GET /api/snapshots
Return list of all snapshots created in the system.
Query params:
sort snapshot order, defaults to name, set to time to display in creation order
Example:
$ curl -v http://localhost:8080/api/snapshots
"""
params = {
'sort': sort
}
r = requests.get(self.cfg['route_snap'], headers=self.headers, params=params)
# r.raise_for_status()
resp_data = json.loads(r.content)
self._out(resp_data)
return resp_data
def snapshot_create_from_local_repo(self, snapshot_name, repo_name, description = None):
"""
CREATE SNAPSHOT FROM LOCAL REPO
POST /api/repos/:name/snapshots
Create snapshot of current local repository :name contents as new snapshot with name :snapname.
JSON body params:
Name - [string], required snapshot name
Description - [string] free-format description how snapshot has been created
HTTP Errors:
Code Description
400 snapshot with name Name already exists
404 local repo with name :name doesn’t exist
Example:
$ curl -X POST -H 'Content-Type: application/json' --data '{"Name":"snap9"}' http://localhost:8080/api/repos/local-repo/snapshots
"""
url = self.cfg['route_repo'] + repo_name + '/snapshots'
if description is None:
description = 'Description for '+ snapshot_name
data = {
'Name': snapshot_name,
'Description': description
}
r = requests.post(url, data=json.dumps(data), headers=self.headers)
# r.raise_for_status()
resp_data = json.loads(r.content)
print resp_data
return resp_data
def snapshot_create_from_package_refs(self, snapshot_name, source_snapshot_list, package_refs_list, description = None):
"""
CREATE SNAPSHOT FROM PACKAGE REFS
POST /api/snapshots
Create snapshot from list of package references.
This API creates snapshot out of any list of package references. Package references could be obtained from other snapshots, local repos or mirrors.
Name - [string], required snapshot name
Description - [string] free-format description how snapshot has been created
SourceSnapshots - [][string] list of source snapshot names (only for tracking purposes)
PackageRefs - [][string] list of package keys which would be contents of the repository
Sending request without SourceSnapshots and PackageRefs would create empty snapshot.
HTTP Errors:
400 snapshot with name Name already exists, package conflict
404 source snapshot doesn’t exist, package doesn’t exist
Example:
$ curl -X POST -H 'Content-Type: application/json' --data '{"Name":"empty"}' http://localhost:8080/api/snapshots
$ curl -X POST -H 'Content-Type: application/json' --data '{"Name":"snap10", "SourceSnapshots": ["snap9"], "Description": "Custom", "PackageRefs": ["Psource pyspi 0.6.1-1.3 3a8b37cbd9a3559e"]}' http://localhost:8080/api/snapshots
"""
url = self.cfg['route_snap'][:-1]
if description is None:
description = 'Description for '+ snapshot_name
print snapshot_name
print description
print source_snapshot_list
print package_refs_list
data = {
'Name': snapshot_name,
'Description': description,
'SourceSnapshots': source_snapshot_list,
'PackageRefs': package_refs_list
}
r = requests.post(url, data=json.dumps(data), headers=self.headers)
# r.raise_for_status()
resp_data = json.loads(r.content)
print resp_data
return resp_data
def snapshot_update(self, old_snapshot_name, new_snapshot_name, description = None):
"""
UPDATE
PUT /api/snapshots/:name
Update snapshot’s description or name.
JSON body params:
Name - [string] new snapshot name
Description - [string] free-format description how snapshot has been created
HTTP Errors:
404 snapshot with such name doesn’t exist
409 rename is not possible: name already used by another snapshot
Example:
$ curl -X PUT -H 'Content-Type: application/json' --data '{"Name": "snap-wheezy"}' http://localhost:8080/api/snapshots/snap1
"""
url = self.cfg['route_snap'] + old_snapshot_name
if description is None:
description = 'Description for ' + new_snapshot_name
data = {
'Name': new_snapshot_name,
'Description': description
}
r = requests.put(url, data=json.dumps(data), headers=self.headers)
resp_data = json.loads(r.content)
print resp_data
return resp_data
def snapshot_show(self, snapshot_name):
"""
SHOW
GET /api/snapshots/:name
Get information about snapshot by name.
HTTP Errors:
Code Description
404 snapshot with such name doesn’t exist
Example:
$ curl http://localhost:8080/api/snapshots/snap1
"""
url = self.cfg['route_snap'] + snapshot_name
r = requests.get(url, headers=self.headers)
resp_data = json.loads(r.content)
print resp_data
return resp_data
def snapshot_delete(self, snapshot_name, force = '0'):
"""
DELETE
DELETE /api/snapshots/:name
Delete snapshot. Snapshot can’t be deleted if it is published. aptly would refuse to delete snapshot if it has been used as source to create other snapshots, but that could be overridden with force parameter.
Query params:
force - when value is set to 1, delete snapshot even if it has been used as source snapshot
HTTP Errors:
404 snapshot with such name doesn’t exist
409 snapshot can’t be dropped (reason in the message)
Example:
$ curl -X DELETE http://localhost:8080/api/snapshots/snap-wheezy
$ curl -X DELETE 'http://localhost:8080/api/snapshots/snap-wheezy?force=1'
"""
url = self.cfg['route_snap'] + snapshot_name
if force == '1':
print 'Forcing removal of snapshot'
param = {
'force': force
}
r = requests.delete(url, params=param, headers=self.headers)
print r.url
resp_data = json.loads(r.content)
print resp_data
return resp_data
def snapshot_show_packages(self, snapshot_name, package_to_search = None, withDeps = 0, format = 'compact'):
"""
SHOW PACKAGES/SEARCH
GET /api/snapshots/:name/packages
List all packages in snapshot or perform search on snapshot contents and return result.
Query params:
q - package query, if missing - return all packages
withDeps - set to 1 to include dependencies when evaluating package query
format - result format, compact by default ( only package keys), details to return full information about each package ( might be slow on large snapshots)
Example:
$ curl http://localhost:8080/api/snapshots/snap2/packages
$ curl http://localhost:8080/api/snapshots/snap2/packages?q='Name%20( ~%20matlab)'
"""
url = self.cfg['route_snap'] + snapshot_name + '/packages'
if package_to_search is None:
param = {
'withDeps': withDeps,
'format': format
}
else:
param = {
'q': package_to_search,
'withDeps': withDeps,
'format': format
}
r = requests.get(url, params=param, headers=self.headers)
resp_data = json.loads(r.content)
print resp_data
return resp_data
def snapshot_diff(self, snapshot_left, snapshot_right):
"""
DIFFERENCE BETWEEN SNAPSHOTS
GET /api/snapshots/:name/diff/:withSnapshot
Calculate difference between two snapshots :name (left) and :withSnapshot (right).
Response is a list of elements:
Left - package reference present only in left snapshot
Right - package reference present only in right snapshot
If two snapshots are identical, response would be empty list.
null - package reference right - snapshot has package missing in left
package reference - null - left snapshot has package missing in right
package reference - package reference snapshots have different packages
Example:
$ curl http://localhost:8080/api/snapshots/snap2/diff/snap3
"""
url = self.cfg['route_snap'] + snapshot_left + '/diff/' + snapshot_right
r = requests.get(url, headers=self.headers)
resp = json.loads(r.content)
print resp
return resp
###############
# PUBLISH API #
###############
def publish_list(self):
"""
LIST
GET /api/publish
List published repositories.
Example:
$ curl http://localhost:8080/api/publish
"""
url = self.cfg['route_pub']
r = requests.get(url, headers=self.headers)
resp = json.loads(r.content)
print resp
return resp
def publish(self, prefix, sources_kind, sources_list, distribution_name, component=None, label = None, origin = None, force_overwrite = None, architectures_list = None):
"""
PUBLISH SNAPSHOT/LOCAL REPO
POST /api/publish/:prefix
Publish local repository or snapshot under specified prefix. Storage might be passed in prefix as well, e.g. s3:packages/. To supply empty prefix, just remove last part (POST /api/publish)
JSON body params:
SourceKind - [string], required source kind: local for local repositories and snapshot for snapshots
Sources -[]Source, required list of Component/Name objects, Name is either local repository or snpashot name
Distribution - [string] distribution name, if missing aptly would try to guess from sources
Label [string] - value of Label: field in published repository stanza
Origin [string] - value of Origin: field in published repository stanza
ForceOverwrite - bool when publishing, overwrite files in pool/ directory without notice
Architectures - [][string] override list of published architectures
Notes on Sources field:
when publishing single component repository, Component may be omitted, it would be guessed from source or set to default value main
for multiple component published repository, Component would be guessed from source if not set
GPG signing would happen in aptly server, using local to server gpg binary, keyrings.
It’s not possible to configure publishing endpoints via API, they should be set in configuration and require aptly server restart.
HTTP errors:
400 prefix/distribution is already used by another published repository
404 source snapshot/repo hasn’t been found
Example:
$ curl -X POST -H 'Content-Type: application/json' --data '{"SourceKind": "local", "Sources": [{"Name": "local-repo"}], "Architectures": ["i386", "amd64"], "Distribution": "wheezy"}' http://localhost:8080/api/publish
$ curl -X POST -H 'Content-Type: application/json' --data '{"SourceKind": "local", "Sources": [{"Name": "0XktRe6qMFp4b8C", "Component": "contrib"}, {"Name": "EqmoTZiVx8MGN65", "Component": "non-free"}], "Architectures": ["i386", "amd64"], "Distribution": "wheezy"}' http://localhost:8080/api/publish/debian_testing/
"""
url = self.cfg['route_pub'] + prefix
if component is None:
print 'WARNING: Component was not given... setting to main'
component = 'main'
# Prepare list of sources
sources = []
comp_list = component.split()
list = sources_list.split()
if len(comp_list) != len(list):
print "ERROR: sources list and components list should have same length"
return
for x in list:
for y in comp_list:
row = {
'Name': x,
'Component': y
}
sources.append(row)
dat = {}
if label is None:
if origin is None:
if force_overwrite is None:
if architectures_list is None:
print 'simple publish'
dat = {
'SourceKind': sources_kind,
'Sources': sources,
'Distribution': distribution_name
}
else:
print 'fancy publish'
if int(force_overwrite) <= 0:
fo = False
else:
fo = True
print fo
dat = {
'SourceKind': sources_kind,
'Sources': sources,
'Distribution': distribution_name,
'Architectures': architectures_list.split(),
'Label': label,
'Origin': origin,
'ForceOverwrite': fo
}
print dat
r = requests.post(url, data=json.dumps(dat), headers=self.headers)
print r.url
resp = json.loads(r.content)
print resp
return resp
def publish_switch(self, prefix, snapshot_list, distribution, component = None, force_overwrite = 0):
"""
UPDATE PUBLISHED LOCAL REPO/SWITCH PUBLISHED SNAPSHOT
PUT /api/publish/:prefix/:distribution
API action depends on published repository contents:
if local repository has been published, published repository would be updated to match local repository contents
if snapshots have been been published, it is possible to switch each component to new snapshot
JSON body params:
Snapshots - []Source only when updating published snapshots, list of objects Component/Name
ForceOverwrite - bool when publishing, overwrite files in pool/ directory without notice
Example:
$ curl -X PUT -H 'Content-Type: application/json' --data '{"Snapshots": [{"Component": "main", "Name": "8KNOnIC7q900L5v"}]}' http://localhost:8080/api/publish//wheezy
"""
if prefix is None:
prefix = ''
if int(force_overwrite) <= 0:
fo = False
else:
fo = True
url = self.cfg['route_pub'] + prefix + '/' + distribution
snap_list_obj = []
for x in snapshot_list.split():
if component is not None:
snap_obj = {
'Component': component,
'Name': x
}
else:
snap_obj = {
'Name': x
}
snap_list_obj.append(snap_obj)
print snap_list_obj
data = {
'Snapshots': snap_list_obj,
'ForceOverwrite': fo
}
r = requests.put(url, data=json.dumps(data), headers=self.headers)
resp = json.loads(r.content)
print resp
return resp
def publish_drop(self, prefix, distribution, force = 0):
"""
DROP PUBLISHED REPOSITORY
DELETE /api/publish/:prefix/:distribution
Delete published repository, clean up files in published directory.
Query params:
force - force published repository removal even if component cleanup fails
Usually ?force=1 isn’t required, but if due to some corruption component cleanup fails, ?force=1 could be used to drop published repository. This might leave some published repository files left under public/ directory.
Example:
$ curl -X DELETE http://localhost:8080/api/publish//wheezy
"""
url = self.cfg['route_pub'] + prefix + '/' + distribution
param = {
'force': force
}
r = requests.delete(url, params=param, headers=self.headers)
resp = json.loads(r.content)
print resp
return resp
###############
# PACKAGE API #
###############
def package_show_by_key(self, package_key):
"""
SHOW
GET /api/packages/:key
Show information about package by package key.
Package keys could be obtained from various GET .../packages APIs.
Response:
Key - [sitring] package key (unique package identifier)
ShortKey - [string] short package key (should be unique in one package list: snapshot, mirror, local repository)
FilesHash - [string] hash of package files
Package Stanza Fields - [string] all package stanza fields, e.g. Package, Architecture, …
HTTP Errors:
Code Description
404 package with such key doesn’t exist
Example:
$ curl http://localhost:8080/api/packages/'Pi386%20libboost-program-options-dev%201.49.0.1%20918d2f433384e378'
Hint: %20 is url-encoded space.
"""
url = self.cfg['route_pack'] + package_key
r = requests.get(url, headers=self.headers)
resp = json.loads(r.content)
print resp
return resp
#############
# GRAPH API #
#############
def graph(self, file_ext = '.png'):
"""
GET /api/graph.:ext
Generate graph of aptly objects ( same as in aptly graph command).
:ext specifies desired file extension, e.g. .png, .svg.
Example:
open url http://localhost:8080/api/graph.svg in browser (hint: aptly database should be non-empty)
"""
url = self.cfg['route_graph'][:-1] + file_ext
print url
r = requests.get(url, headers=self.headers)
resp = json.loads(r.content)
print resp
return resp
###############
# VERSION API #
###############
def get_version(self):
"""
GET /api/version
Return current aptly version.
Example:
$ curl http://localhost:8080/api/version
"""
url = self.cfg['route_vers']
r = requests.get(url, headers=self.headers)
resp = json.loads(r.content)
print resp
return resp
|
Aptly-Api-Cli
|
/Aptly-Api-Cli-0.1.tar.gz/Aptly-Api-Cli-0.1/aptly_cli/aptly_api_requests.py
|
aptly_api_requests.py
|
import os
import sys
from optparse import OptionParser
from aptly_cli.aptly_api_requests import AptlyApiRequests
def main():
    """Entry point: parse the CLI options and dispatch to the API wrapper."""
    parser = _get_parser_opts()
    # BUG FIX: the original printed the help text *after* already
    # executing the options; show help and exit before doing any work
    # when no arguments were supplied.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)
    obj = AptlyApiRequests()
    (opts, args) = parser.parse_args()
    _execute_opts(obj, opts, args)
def _get_parser_opts():
    """Build the OptionParser describing every supported CLI action.

    Each option maps one-to-one onto an AptlyApiRequests method. Options
    taking several values use optparse's ``nargs``; trailing optional
    values (shown in [BRACKETS] inside the metavars) are read from the
    leftover positional arguments by _execute_opts().

    :returns: a configured optparse.OptionParser
    """
    parser = OptionParser()
    # --- local repository management ---
    parser.add_option('--repo_list',
                      action='store_true',
                      help='List all local repos')
    parser.add_option('--repo_create',
                      nargs=1,
                      help='Create local repo', metavar='REPO_NAME [COMMENT] [DISTRIBUTION] [COMPONENT]')
    parser.add_option('--repo_show_packages',
                      nargs=1,
                      help='Shows packages from repo', metavar='REPO_NAME [PACKAGE_TO_SEARCH] [WITH_DEPS] [FORMAT]')
    parser.add_option('--repo_show',
                      nargs=1,
                      help='Show basic repo-information', metavar='REPO_NAME')
    parser.add_option('--repo_edit',
                      nargs=1,
                      help='Edit repo-information', metavar='REPO_NAME COMMENT DISTRIBUTION COMPONENT')
    parser.add_option('--repo_delete',
                      nargs=1,
                      help='Delete repository', metavar='REPO_NAME')
    parser.add_option('--repo_add_packages_by_key',
                      nargs=2,
                      help='Add packages to local repo by key', metavar='REPO_NAME PACKAGE_REFS')
    parser.add_option('--repo_delete_packages_by_key',
                      nargs=2,
                      help='Delete packages from repository by key', metavar='REPO_NAME PACKAGE_REFS')
    # --- file upload management ---
    parser.add_option('--file_list_dirs',
                      action='store_true',
                      help='Lists all upload-directories')
    parser.add_option('--file_upload',
                      nargs=2,
                      help='Upload file to local upload-directory', metavar='UPLOAD_DIR FILE')
    parser.add_option('--repo_add_package_from_upload',
                      nargs=3,
                      help='Add package from upload folder to local repo', metavar='REPO_NAME UPLOAD_DIR PACKAGE_NAME')
    parser.add_option('--file_list',
                      action='store_true',
                      help='List uploaded files')
    parser.add_option('--file_delete_dir',
                      nargs=1,
                      help='Delete upload directory', metavar='UPLOAD_DIR')
    parser.add_option('--file_delete',
                      nargs=2,
                      help='Delete a file in upload directory', metavar='UPLOAD_DIR FILE')
    # --- snapshot management ---
    parser.add_option('--snapshot_create_from_local_repo',
                      nargs=2,
                      help='Create snapshot from local repo', metavar='SNAPSHOT_NAME REPO_NAME [DESCRIPTION]')
    parser.add_option('--snapshot_create_by_pack_refs',
                      nargs=3,
                      help='Create snapshot by package references (Please use %20 for spaces for one package reference)',
                      metavar='SNAPSHOT_NAME SOURCE_SNAPSHOTS PACKAGE_REF_LIST [DESCRIPTION]')
    parser.add_option('--snapshot_show',
                      nargs=1,
                      help='Show basic information about snapshot', metavar='SNAPSHOT_NAME')
    parser.add_option('--snapshot_show_packages',
                      nargs=1,
                      help='Show all packages the snapshot is containing or optionally search for one.', metavar='SNAPSHOT_NAME [PACKAGE_TO_SEARCH] [WITH_DEPS] [FORMAT]')
    parser.add_option('--snapshot_update',
                      nargs=2,
                      help='Rename snapshot and optionally change description', metavar='OLD_SNAPSHOT_NAME NEW_SNAPSHOT_NAME [DESCRIPTION]')
    parser.add_option('--snapshot_list',
                      action='store_true',
                      help='Lists all available snapshots', metavar='[SORT_BY_NAME_OR_TIME]')
    parser.add_option('--snapshot_diff',
                      nargs=2,
                      help='List differences of two snapshots', metavar='LEFT_SNAPSHOT_NAME RIGHT_SNAPSHOT_NAME')
    parser.add_option('--snapshot_delete',
                      nargs=1,
                      help='Delete snapshot by name. Optionally force deletion.', metavar='SNAPSHOT_NAME [FORCE_DELETION]')
    # --- publishing ---
    parser.add_option('--publish_list',
                      action='store_true',
                      help='List all available repositories to publish to')
    parser.add_option('--publish',
                      nargs=4,
                      help='Publish snapshot or repository to storage',
                      metavar='PREFIX SOURCES_KIND SOURCES_LIST DISTRIBUTION_LIST [COMPONENT] [LABEL] [ORIGIN] [FORCE_OVERWRITE] [ARCHITECTURES_LIST]')
    parser.add_option('--publish_drop',
                      nargs=2,
                      help='Drop published repo content',
                      metavar='PREFIX DISTRIBUTION [FORCE_REMOVAL]')
    parser.add_option('--publish_switch',
                      nargs=3,
                      help='Switching snapshots to published repo with minimal server down time.',
                      metavar='PREFIX SOURCES_LIST DISTRIBUTION [COMPONENT] [FORCE_OVERWRITE]')
    # --- misc ---
    parser.add_option('--get_version',
                      action='store_true',
                      help='Returns aptly version')
    parser.add_option('--package_show_by_key',
                      nargs=1,
                      help='Show packages by key',
                      metavar='PACKAGE_KEY')
    return parser
def _execute_opts(obj, opts, args):
    """Dispatch every option set on the command line to the API wrapper.

    :param obj: AptlyApiRequests instance to invoke
    :param opts: parsed optparse option values
    :param args: trailing positional arguments used for optional params
    """
    if opts.repo_list:
        obj.repo_list()
    if opts.repo_create:
        if len(args) >= 3:
            # BUG FIX: the original assigned attributes on an undefined
            # name ``data`` (NameError). Use a throwaway carrier object;
            # assumes repo_create reads .comment/.default_distribution/
            # .default_component attributes — TODO confirm against its
            # expected parameter type.
            data = type('RepoParams', (object,), {})()
            data.comment = args[0]
            data.default_distribution = args[1]
            data.default_component = args[2]
            obj.repo_create(opts.repo_create, data)
        else:
            obj.repo_create(opts.repo_create)
    if opts.repo_show_packages:
        if len(args) >= 3:
            obj.repo_show_packages(opts.repo_show_packages, args[0], args[1], args[2])
        else:
            obj.repo_show_packages(opts.repo_show_packages)
    if opts.repo_show:
        obj.repo_show(opts.repo_show)
    if opts.repo_edit:
        if len(args) >= 3:
            # Same undefined-name fix as in the repo_create branch.
            data = type('RepoParams', (object,), {})()
            data.comment = args[0]
            data.default_distribution = args[1]
            data.default_component = args[2]
            obj.repo_edit(opts.repo_edit, data)
        else:
            print('Wrong usage!')
    if opts.repo_delete:
        obj.repo_delete(opts.repo_delete)
    if opts.file_list_dirs:
        obj.file_list_directories()
    if opts.file_upload:
        obj.file_upload(opts.file_upload[0], opts.file_upload[1])
    if opts.repo_add_package_from_upload:
        obj.repo_add_package_from_upload(opts.repo_add_package_from_upload[0],
                                         opts.repo_add_package_from_upload[1],
                                         opts.repo_add_package_from_upload[2])
    if opts.repo_add_packages_by_key:
        print('repo_add_packages_by_key')
        o = opts.repo_add_packages_by_key
        obj.repo_add_packages_by_key(o[0], o[1].split(', '))
    if opts.repo_delete_packages_by_key:
        print('repo_delete_packages_by_key')
        o = opts.repo_delete_packages_by_key
        obj.repo_delete_packages_by_key(o[0], o[1].split(', '))
    if opts.file_list:
        obj.file_list()
    if opts.file_delete_dir:
        obj.file_delete_directory(opts.file_delete_dir)
    if opts.file_delete:
        obj.file_delete(opts.file_delete[0], opts.file_delete[1])
    if opts.snapshot_create_from_local_repo:
        o = opts.snapshot_create_from_local_repo
        if len(args) >= 1:
            obj.snapshot_create_from_local_repo(o[0], o[1], args[0])
        else:
            obj.snapshot_create_from_local_repo(o[0], o[1])
    if opts.snapshot_create_by_pack_refs:
        o = opts.snapshot_create_by_pack_refs
        refs = o[2].split(', ')
        if len(args) >= 1:
            obj.snapshot_create_from_package_refs(o[0], o[1].split(', '), refs, args[0])
        else:
            obj.snapshot_create_from_package_refs(o[0], o[1].split(', '), refs)
    if opts.snapshot_show_packages:
        if len(args) >= 3:
            obj.snapshot_show_packages(opts.snapshot_show_packages, args[0], args[1], args[2])
        else:
            obj.snapshot_show_packages(opts.snapshot_show_packages)
    if opts.snapshot_update:
        if len(args) >= 1:
            obj.snapshot_update(opts.snapshot_update[0], opts.snapshot_update[1], args[0])
    if opts.snapshot_list:
        if len(args) >= 1:
            obj.snapshot_list(args[0])
        else:
            obj.snapshot_list()
    if opts.snapshot_diff:
        obj.snapshot_diff(opts.snapshot_diff[0], opts.snapshot_diff[1])
    if opts.snapshot_delete:
        if len(args) >= 1:
            print(args[0])
            obj.snapshot_delete(opts.snapshot_delete, args[0])
        else:
            obj.snapshot_delete(opts.snapshot_delete)
    if opts.publish_list:
        obj.publish_list()
    if opts.publish:
        if len(args) >= 5:
            obj.publish(opts.publish[0], opts.publish[1], opts.publish[2], opts.publish[3],
                        args[0], args[1], args[2], args[3], args[4])
        else:
            obj.publish(opts.publish[0], opts.publish[1], opts.publish[2], opts.publish[3])
    if opts.publish_switch:
        o = opts.publish_switch
        if len(args) >= 2:
            obj.publish_switch(o[0], o[1], o[2], args[0], args[1])
        else:
            obj.publish_switch(o[0], o[1], o[2])
    if opts.publish_drop:
        if len(args) >= 1:
            obj.publish_drop(opts.publish_drop[0], opts.publish_drop[1], args[0])
        else:
            obj.publish_drop(opts.publish_drop[0], opts.publish_drop[1])
    if opts.package_show_by_key:
        obj.package_show_by_key(opts.package_show_by_key)
    if opts.get_version:
        obj.get_version()
# Script entry point when executed directly.
if __name__ == "__main__":
    sys.exit(main())
|
Aptly-Api-Cli
|
/Aptly-Api-Cli-0.1.tar.gz/Aptly-Api-Cli-0.1/aptly_cli/cli/cli.py
|
cli.py
|
import time
from aptus.timeutil import duration, future
class NullProgressReporter:
    """No-op implementation of the progress-reporting interface.

    Defines the three callbacks every reporter must provide: `begin`,
    `progress`, and `end`.
    """

    def begin(self):
        """Invoked once before a render starts."""
        pass

    def progress(self, arg, num_done, info=''):
        """Invoked repeatedly while rendering.

        `arg` is an opaque caller-defined value; `num_done` is an integer
        progress count whose range/meaning is defined by the caller;
        `info` is an optional human-readable detail string.
        """
        pass

    def end(self):
        """Invoked once after a render finishes."""
        pass
class IntervalProgressReporter:
    """Decorator that forwards progress to a wrapped reporter at most
    once every `nsec` seconds (begin/end always pass through).
    """

    def __init__(self, nsec, reporter):
        self.nsec = nsec
        self.reporter = reporter

    def begin(self):
        # Timestamp of the last forwarded report.
        self.latest = time.time()
        self.reporter.begin()

    def progress(self, arg, num_done, info=''):
        now = time.time()
        if now - self.latest <= self.nsec:
            # Too soon since the last forwarded report: drop it.
            return
        self.reporter.progress(arg, num_done, info)
        self.latest = now

    def end(self):
        self.reporter.end()
class AggregateProgressReporter:
    """Fan out each progress callback to a collection of child reporters."""

    def __init__(self):
        self.kids = []

    def add(self, reporter):
        """Register another child reporter."""
        self.kids.append(reporter)

    def begin(self):
        for child in self.kids:
            child.begin()

    def progress(self, arg, num_done, info=''):
        for child in self.kids:
            child.progress(arg, num_done, info)

    def end(self):
        for child in self.kids:
            child.end()
# Cheap way to measure and average a number of runs.
# Process-wide accumulators mutated by ConsoleProgressReporter.end().
nruns = 0
totaltotal = 0
class ConsoleProgressReporter:
    """A progress reporter that writes status lines to the console.

    This `progress` function interprets the `num_done` arg as a fraction
    of total work, expressed in millionths.
    """

    def begin(self):
        # Wall-clock start used for elapsed-time / ETA computation.
        self.start = time.time()

    def progress(self, arg, num_done, info=''):
        frac_done = num_done / 1000000.0
        now = time.time()
        so_far = int(now - self.start)
        # BUG FIX: guard the division — the original raised
        # ZeroDivisionError when called with num_done == 0.
        if frac_done > 0:
            to_go = int(so_far / frac_done * (1 - frac_done))
        else:
            to_go = 0
        if info:
            info = ' ' + info
        print("%5.2f%%: %11s done, %11s to go, eta %10s%s" % (
            frac_done * 100, duration(so_far), duration(to_go), future(to_go), info
        ))

    def end(self):
        total = time.time() - self.start
        global totaltotal, nruns
        totaltotal += total
        nruns += 1
        print("Total: %s (%.4fs)" % (duration(total), total))
        #print("Running average: %.6fs over %d runs" % (totaltotal/nruns, nruns))
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/progress.py
|
progress.py
|
import copy
import json
import math
import multiprocessing
import queue
import threading
import time
import numpy
from aptus import __version__, settings
from aptus.engine import AptEngine
from aptus.options import AptusState
from aptus.palettes import all_palettes
from aptus.progress import NullProgressReporter
class WorkerPool:
    """Thread pool feeding tile-compute work items to worker threads."""

    def __init__(self):
        self.workers = []           # Threads that will do the work.
        self.work = queue.Queue(0)  # Unbounded queue of work items.
        self.num_threads = multiprocessing.cpu_count()

    def get_ready(self):
        """Call before adding work. Lazily spawns the worker threads."""
        if not self.workers:
            for _ in range(self.num_threads):
                t = threading.Thread(target=self.worker)
                # Fix: Thread.setDaemon() is deprecated; assign the
                # attribute so workers don't block interpreter exit.
                t.daemon = True
                t.start()
                self.workers.append(t)

    def put(self, work_item):
        """Add a work item: (result_queue, apt_compute, n_tile, coords).

        (The original docstring described a 3-tuple, but `worker`
        unpacks four fields.)
        """
        self.work.put(work_item)

    def worker(self):
        """Work loop run on each compute thread: compute a tile, then
        report its coords on the item's result queue."""
        while True:
            result_queue, apt_compute, n_tile, coords = self.work.get()
            apt_compute.compute_array(n_tile, coords)
            result_queue.put(coords)
class BucketCountingProgressReporter:
    """Aggregate per-tile progress for parallel rendering.

    Each tile reports its own pixel count; this class sums the per-tile
    buckets and forwards a single overall fraction (in millionths) to the
    wrapped reporter.
    """

    def __init__(self, num_buckets, expected_total, reporter):
        self.buckets = [0] * num_buckets
        self.expected_total = expected_total
        self.reporter = reporter

    def begin(self):
        self.reporter.begin()

    def progress(self, arg, num_done, info=''):
        """Record progress for one tile.

        `arg` is the tile number; `num_done` is the number of pixels
        computed so far in that tile.
        """
        self.buckets[arg] = num_done
        done_overall = sum(self.buckets)
        millionths = int(done_overall * 1000000.0 / self.expected_total)
        self.reporter.progress(0, millionths, "[%2d] %s" % (arg, info))

    def end(self):
        self.reporter.end()
class GridParams:
def __init__(self):
self.bounds = (600, 600)
self.ridxdy = (0, .1, .1, 0)
self.ri0 = (0, 0)
@classmethod
def from_user_perspective(cls, center, diam, angle, size):
gparams = cls()
gparams.bounds = size
# pixsize is the size of a single sample, in real units.
pixsize = max(
diam[0] / gparams.bounds[0],
diam[1] / gparams.bounds[1],
)
rad = math.radians(angle)
dx = math.cos(rad) * pixsize
dy = math.sin(rad) * pixsize
# The upper-left corner is computed from the center, minus the radii,
# plus half a pixel, so that we're sampling the center of the pixel.
gparams.ridxdy = (dx, dy, dy, -dx)
halfsizew = gparams.bounds[0]/2.0 - 0.5
halfsizeh = gparams.bounds[1]/2.0 - 0.5
ri0x = center[0] - halfsizew * gparams.ridxdy[0] - halfsizeh * gparams.ridxdy[2]
ri0y = center[1] - halfsizew * gparams.ridxdy[1] - halfsizeh * gparams.ridxdy[3]
# In order for x-axis symmetry to apply, the x axis has to fall between
# pixels or through the center of a pixel.
pix_offset, _ = math.modf(ri0y / pixsize)
ri0y -= pix_offset * pixsize
gparams.ri0 = ri0x, ri0y
return gparams
def subtile(self, xmin, xmax, ymin, ymax):
"""
Make a new GridParams for a subtile of this one.
"""
tileparams = GridParams()
tileparams.bounds = (xmax - xmin, ymax - ymin)
ri0x, ri0y = self.ri0
tileparams.ridxdy = rixdx, rixdy, riydx, riydy = self.ridxdy
tileparams.ri0 = (
ri0x + xmin * rixdx + ymin * rixdy,
ri0y + xmin * riydx + ymin * riydy,
)
return tileparams
def coords_from_pixel(self, x, y):
""" Get the coords of a pixel in the grid. Note that x and y can be
fractional.
"""
# The .5 adjustment is because the grid is aligned to the center of the
# pixels, but we need to return the upper-left of the pixel so that other
# math comes out right.
x = float(x) - 0.5
y = float(y) - 0.5
r = self.ri0[0] + self.ridxdy[0]*x + self.ridxdy[2]*y
i = self.ri0[1] + self.ridxdy[1]*x + self.ridxdy[3]*y
return r, i
def pixel_from_coords(self, r, i):
""" Get the pixel coords containing the fractal coordinates.
"""
d0, d1, d2, d3 = self.ridxdy
ri00, ri01 = self.ri0
# Thanks, Maxima!
x = (d2*(i-ri01)+d3*ri00-d3*r)/(d1*d2-d0*d3)
y = -(d0*(i-ri01)+d1*ri00-d1*r)/(d1*d2-d0*d3)
return x, y
class AptusCompute:
    """ The Mandelbrot compute class. It wraps the AptEngine to provide pythonic
    convenience.
    There are two coordinate systems at work here: the ri plane is the
    fractal plane, real and imaginary floats. The xy plane are screen coordinates,
    in pixels, usually integers.
    """
    # A single thread pool shared by every AptusCompute instance.
    worker_pool = WorkerPool()

    def __init__(self):
        # geometry
        self.center = settings.mandelbrot_center
        self.diam = settings.mandelbrot_diam, settings.mandelbrot_diam
        self.size = settings.explorer_size
        self.angle = 0.0
        self._geometry_attributes = ['center', 'diam', 'size', 'angle']
        # computation
        self.iter_limit = 1000
        self.continuous = False
        self.supersample = 1
        self.mode = 'mandelbrot'
        self.rijulia = 0.0, 0.0
        self._computation_attributes = ['iter_limit', 'continuous', 'supersample', 'mode', 'rijulia']
        # coloring
        self.palette = all_palettes[0]
        self.palette_phase = 0
        self.palette_scale = 1.0
        self._coloring_attributes = ['palette', 'palette_phase', 'palette_scale']
        # other
        self.outfile = 'Aptus.png'
        self.quiet = False
        # The C extension for doing the heavy lifting.
        self.eng = AptEngine()
        self.gparams = GridParams()
        # counts is a numpy array of 32bit ints: the iteration counts at each pixel.
        self.counts = None
        # status is a numpy array of 8bit ints that tracks the boundary trace
        # status of each pixel: 0 for not computed, 1 for computed but not traced,
        # 2 for tracing, and 3 for traced.
        self.status = None
        # An array for the output pixels.
        self.pix = None
        # A gray checkerboard
        self.chex = None
        self.pixels_computed = False
        self._clear_old_geometry()

    def _record_old_geometry(self):
        """ Call this before any of the geometry settings change, to possibly
        optimize the next computation.
        """
        self.old_grid_bounds = self.gparams.bounds
        self.old_ridxdy = self.gparams.ridxdy
        self.old_ri0 = self.gparams.ri0
        self.old_angle = self.angle
        for a in self._computation_attributes:
            setattr(self, 'old_'+a, getattr(self, a))

    def _clear_old_geometry(self):
        # Forget the remembered geometry, so the next create_mandel() cannot
        # reuse any old counts.
        self.old_grid_bounds = (0,0)
        self.old_ridxdy = (0, 0, 0, 0)
        self.old_ri0 = (0,0)
        self.old_angle = 0
        for a in self._computation_attributes:
            setattr(self, 'old_'+a, 0)

    def computation_changed(self):
        # True if any computation attribute differs from its recorded old_ value.
        for a in self._computation_attributes:
            if getattr(self, 'old_'+a) != getattr(self, a):
                return True
        return False

    def grid_params(self):
        """ Build a GridParams for the current settings, with the pixel size
        scaled up by the supersample factor.
        Side effect: sets self.pixsize to the sample spacing in real units.
        """
        size = (self.size[0] * self.supersample, self.size[1] * self.supersample)
        gparams = GridParams.from_user_perspective(
            self.center, self.diam, self.angle, size
        )
        self.pixsize = math.hypot(*gparams.ridxdy[:2])
        return gparams

    def create_mandel(self, gparams=None):
        """ Allocate the counts/status workspaces for a new computation,
        reusing still-valid counts from the previous run when the view only
        panned (same step vectors, same computation parameters).
        """
        if gparams is None:
            gparams = self.grid_params()
        self.gparams = gparams
        self.progress = NullProgressReporter()
        self.while_waiting = None
        self.stats = ComputeStats()
        # Create new workspaces for the compute engine.
        old_counts = self.counts
        self.counts = numpy.zeros((self.gparams.bounds[1], self.gparams.bounds[0]), dtype=numpy.uint32)
        self.status = numpy.zeros((self.gparams.bounds[1], self.gparams.bounds[0]), dtype=numpy.uint8)
        # Figure out if we can keep any of our old counts or not.
        if (old_counts is not None and
                self.gparams.ridxdy == self.old_ridxdy and
                not self.computation_changed()):
            # All the params are compatible, see how much we shifted.
            dx, dy = self.pixel_from_coords(*self.old_ri0)
            dx = int(round(dx))
            dy = int(round(dy))
            # Figure out what rectangle is still valid, keep in mind the old
            # and new rectangles could be different sizes.
            nc = min(self.counts.shape[1] - abs(dx), old_counts.shape[1])
            nr = min(self.counts.shape[0] - abs(dy), old_counts.shape[0])
            if nc > 0 and nr > 0:
                # Some rows and columns are shared between old and new.
                if dx >= 0:
                    oldx, newx = 0, dx
                else:
                    oldx, newx = -dx, 0
                if dy >= 0:
                    oldy, newy = 0, dy
                else:
                    oldy, newy = -dy, 0
                # Copy the common rectangles. Old_counts gets copied to counts,
                # and status gets the common rectangle filled with 3's.
                self.counts[newy:newy+nr,newx:newx+nc] = old_counts[oldy:oldy+nr,oldx:oldx+nc]
                self.status[newy:newy+nr,newx:newx+nc] = 3 # 3 == Fully computed and filled
        # In desperate times, printing the counts and status might help...
        if 0:
            for y in range(self.gparams.bounds[1]):
                l = ""
                for x in range(self.gparams.bounds[0]):
                    l += "%s%s" % (
                        "_-=@"[self.status[y,x]],
                        "0123456789"[self.counts[y,x]%10]
                    )
                print(l)
        self.pixels_computed = False
        self._clear_old_geometry()
        self._set_engine_parameters()

    def clear_results(self):
        """ Discard any results held.
        """
        self.counts = None

    def copy_all(self, other):
        """ Copy the important attributes from other to self.
        """
        self.copy_geometry(other)
        self.copy_coloring(other)
        self.copy_computation(other)

    def copy_geometry(self, other):
        """ Copy the geometry attributes from other to self, returning True if
        any of them actually changed.
        """
        return self._copy_attributes(other, self._geometry_attributes)

    def copy_coloring(self, other):
        """ Copy the coloring attributes from other to self, returning True if
        any of them actually changed.
        """
        return self._copy_attributes(other, self._coloring_attributes)

    def copy_computation(self, other):
        """ Copy the computation attributes from other to self, returning True if
        any of them actually changed.
        """
        return self._copy_attributes(other, self._computation_attributes)

    def _copy_attributes(self, other, attrs):
        """ Copy a list of attributes from other to self, returning True if
        any of them actually changed.
        """
        changed = False
        for attr in attrs:
            # Detect if changed, then copy the attribute regardless. This makes
            # the .palette copy every time, which guarantees proper drawing at
            # the expense of a lot of palette copying.
            if getattr(self, attr) != getattr(other, attr):
                changed = True
            otherval = copy.deepcopy(getattr(other, attr))
            setattr(self, attr, otherval)
        return changed

    def color_mandel(self):
        """ Render counts/status into self.pix (a uint8 RGB array) with the
        current palette, and return it.
        """
        # NOTE(review): counts.shape is (rows, cols), so these names read as
        # (w, h) but are used consistently below.
        w, h = self.counts.shape
        if (self.chex is None) or (self.chex.shape[:2] != self.counts.shape):
            # Make a checkerboard
            sq = 15
            c = numpy.fromfunction(lambda x,y: ((x//sq) + (y//sq)) % 2, (w,h))
            self.chex = numpy.empty((w,h,3), dtype=numpy.uint8)
            self.chex[c == 0] = (0xAA, 0xAA, 0xAA)
            self.chex[c == 1] = (0x99, 0x99, 0x99)
        # Fresh copy each render, so not-yet-computed pixels show the
        # checkerboard rather than stale colors.
        self.pix = numpy.copy(self.chex)
        # Modulo in C is ill-defined if anything is negative, so make sure the
        # phase is positive if we're going to wrap.
        phase = self.palette_phase
        color_bytes = self.palette.color_bytes()
        if self.palette.wrap:
            phase %= len(color_bytes)
        self.eng.apply_palette(
            self.counts, self.status, color_bytes, phase, self.palette_scale,
            self.palette.incolor, self.palette.wrap, self.pix
        )
        return self.pix

    def _set_engine_parameters(self):
        # Push the current geometry and computation settings down into the
        # C engine before computing.
        self.eng.ri0 = self.gparams.ri0
        self.eng.ridxdy = self.gparams.ridxdy
        self.eng.iter_limit = self.iter_limit
        # Set bailout differently based on continuous or discrete coloring.
        if self.continuous:
            self.eng.bailout = 100.0
        else:
            self.eng.bailout = 2.0
        # Continuous is really two different controls in the engine.
        self.eng.cont_levels = self.eng.blend_colors = 256 if self.continuous else 1
        # Different modes require different settings.
        if self.mode == "mandelbrot":
            self.eng.julia = 0
            self.eng.rijulia = (0, 0)
            self.eng.trace_boundary = 1
            self.eng.check_cycles = 1
        elif self.mode == "julia":
            self.eng.julia = 1
            self.eng.rijulia = tuple(self.rijulia)
            # Boundary tracing and cycle checking are Mandelbrot-only
            # optimizations here.
            self.eng.trace_boundary = 0
            self.eng.check_cycles = 0
        else:
            raise Exception("Unknown mode: %r" % (self.mode,))

    def compute_pixels(self):
        """ Compute the fractal into self.counts, farming tiles out to the
        shared worker pool and reporting progress along the way.
        No-op if the pixels are already computed.
        """
        if self.pixels_computed:
            return
        if not self.quiet:
            print("ri %r step %r, angle %.1f, iter_limit %r, size %r" % (
                self.eng.ri0, self.pixsize, self.angle, self.eng.iter_limit, self.gparams.bounds
            ))
            print("center %r, diam %r" % (self.center, self.diam))
        # Figure out how many pixels have to be computed: make a histogram of
        # the buckets of values: 0,1,2,3.
        buckets, _ = numpy.histogram(self.status, 4, (0, 3))
        num_compute = buckets[0]
        x_side_cuts, y_side_cuts = self.slice_tiles()
        self.bucket_progress = BucketCountingProgressReporter(x_side_cuts*y_side_cuts, num_compute, self.progress)
        self.bucket_progress.begin()
        self.progress = self.bucket_progress
        self.refresh_rate = .5
        #self.eng.debug_callback = self.debug_callback
        # NOTE(review): a WorkerPool instance is always truthy, so the else
        # branch only runs if worker_pool is replaced with something falsy.
        if self.worker_pool:
            # Start the threads going.
            self.worker_pool.get_ready()
            # Create work items with the tiles to compute
            result_queue = queue.Queue(0)
            n_todo = 0
            xcuts = self.cuts(0, self.counts.shape[1], x_side_cuts)
            ycuts = self.cuts(0, self.counts.shape[0], y_side_cuts)
            for i in range(y_side_cuts):
                for j in range(x_side_cuts):
                    coords = (xcuts[j], xcuts[j+1], ycuts[i], ycuts[i+1])
                    self.worker_pool.put((result_queue, self, n_todo, coords))
                    n_todo += 1
            # Wait for the workers to finish, calling our while_waiting function
            # periodically.
            next_time = time.time() + self.refresh_rate
            while n_todo:
                while True:
                    if self.while_waiting and time.time() > next_time:
                        self.while_waiting()
                        next_time = time.time() + self.refresh_rate
                    try:
                        result_queue.get(timeout=self.refresh_rate)
                        n_todo -= 1
                        break
                    except queue.Empty:
                        pass
        else:
            # Not threading: just compute the whole rectangle right now.
            self.compute_array()
        # Clean up
        self.bucket_progress.end()
        self._record_old_geometry()
        self.pixels_computed = True
        # Once compute_array is done, the status array is all 3's, so there's no
        # point in keeping it around.
        self.status = None

    def cuts(self, lo, hi, n):
        """Return a list of n+1 evenly spaced numbers between `lo` and `hi`."""
        return [int(round(lo+float(i)*(hi-lo)/n)) for i in range(n+1)]

    def slice_tiles(self):
        """Decide how to divide the current view into tiles for workers.
        Returns two numbers, the number of tiles in the x and y directions.
        """
        # Slice into roughly 200-pixel tiles.
        x, y = max(self.gparams.bounds[0]//200, 1), max(self.gparams.bounds[1]//200, 1)
        # If the xaxis is horizontal, and is in the middle third of the image,
        # then slice the window into vertical slices to maximize the benefit of
        # the axis symmetry.
        if self.angle == 0:
            top = self.gparams.ri0[1]
            # At angle==0, ridxdy[0] is the sample spacing, so this is the
            # view height in fractal units.
            height = self.gparams.bounds[1] * self.gparams.ridxdy[0]
            if top > 0 and height > top:
                axis_frac = top / height
                if .25 < axis_frac < .75:
                    # Use tall slices to get axis symmetry
                    y = 1
        return x, y

    def compute_array(self, n_tile=0, coords=None):
        """ Compute one tile (or the whole grid if `coords` is None).
        `coords` is (xmin, xmax, ymin, ymax) in pixel coordinates.
        """
        if coords is not None:
            xmin, xmax, ymin, ymax = coords
        else:
            xmin = ymin = 0
            ymax, xmax = self.counts.shape
        # The computation optimizations can go wrong if the set is zoomed out
        # too far. They decide the entire set can be flood-filled since the
        # edges of the view are all the same count. To prevent this, if we are
        # zoomed out enough to see the entire radius-2 circle, and the origin
        # is in the view, then compute in two halves, split at x=0.
        optimization_safe = True
        x0, y0 = self.pixel_from_coords(0.0, 0.0)
        if xmin <= x0 < xmax and ymin <= y0 < ymax:
            min_tile_diam = min(
                (xmax - xmin) * self.diam[0] / self.size[0],
                (ymax - ymin) * self.diam[1] / self.size[1],
            )
            if min_tile_diam >= 4.0:
                optimization_safe = False
        if optimization_safe:
            self._compute_array(n_tile, (xmin, xmax, ymin, ymax))
        else:
            # Split at the origin's pixel column and compute each half.
            self._compute_array(n_tile, (xmin, int(x0), ymin, ymax))
            self._compute_array(n_tile, (int(x0), xmax, ymin, ymax))

    def _compute_array(self, n_tile=0, coords=None):
        # Hand one rectangle to the C engine and accumulate its statistics.
        xmin, xmax, ymin, ymax = coords
        stats = self.eng.compute_array(
            self.counts, self.status,
            xmin, xmax, ymin, ymax,
            n_tile, self.progress.progress
        )
        self.stats += stats

    def set_counts(self, counts):
        """ Install a precomputed counts array, marking every pixel as done. """
        self.counts = counts
        self.status = numpy.full((self.gparams.bounds[1], self.gparams.bounds[0]), 3, dtype=numpy.uint8)

    def debug_callback(self, info):
        print(info)

    # Information methods

    def coords_from_pixel(self, x, y):
        # Delegate to the current grid geometry.
        return self.gparams.coords_from_pixel(x, y)

    def pixel_from_coords(self, r, i):
        # Delegate to the current grid geometry.
        return self.gparams.pixel_from_coords(r, i)

    # Output-writing methods

    def write_image(self, im, fout):
        """ Write the image `im` to the path or file object `fout`.
        """
        # PNG info mojo from: http://blog.modp.com/2007/08/python-pil-and-png-metadata-take-2.html
        from PIL import PngImagePlugin
        aptst = AptusState(self)
        info = PngImagePlugin.PngInfo()
        info.add_text("Software", "Aptus %s" % __version__)
        info.add_text("Aptus State", aptst.write_string())
        info.add_text("Aptus Stats", json.dumps(self.stats))
        im.save(fout, 'PNG', pnginfo=info)
class ComputeStats(dict):
    """Collected statistics about the computation."""
    # This statmap is also used by gui.StatsPanel
    statmap = [
        { 'label': 'Min iteration', 'key': 'miniter', 'sum': min },
        { 'label': 'Max iteration', 'key': 'maxiter', 'sum': max },
        { 'label': 'Total iterations', 'key': 'totaliter', 'sum': sum },
        { 'label': 'Total cycles', 'key': 'totalcycles', 'sum': sum },
        { 'label': 'Shortest cycle', 'key': 'minitercycle', 'sum': min },
        { 'label': 'Longest cycle', 'key': 'maxitercycle', 'sum': max },
        { 'label': 'Maxed points', 'key': 'maxedpoints', 'sum': sum },
        { 'label': 'Computed points', 'key': 'computedpoints', 'sum': sum },
        { 'label': 'Filled points', 'key': 'filledpoints', 'sum': sum },
        { 'label': 'Flipped points', 'key': 'flippedpoints', 'sum': sum },
        { 'label': 'Boundaries traced', 'key': 'boundaries', 'sum': sum },
        { 'label': 'Boundaries filled', 'key': 'boundariesfilled', 'sum': sum },
        { 'label': 'Longest boundary', 'key': 'longestboundary', 'sum': max },
        { 'label': 'Largest fill', 'key': 'largestfilled', 'sum': max },
        { 'label': 'Min edge iter', 'key': 'miniteredge', 'sum': min },
    ]

    def __init__(self):
        # Start every stat at None, meaning "no data yet".
        self.update({stat['key']: None for stat in self.statmap})

    def __iadd__(self, other):
        """Accumulate a dict of stats to ourselves, combining each stat
        pair with its statmap 'sum' function (min, max, or sum)."""
        for stat in self.statmap:
            key = stat['key']
            ours, theirs = self[key], other[key]
            if ours is None:
                self[key] = theirs
            elif theirs is not None:
                self[key] = stat['sum']([ours, theirs])
        return self
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/compute.py
|
compute.py
|
import colorsys
import math
from aptus import data_file
# Pure data-munging functions
def _255(*vals):
""" Convert all arguments from 0-1.0 to 0-255.
"""
return [int(round(x * 255)) for x in vals]
def _1(*vals):
""" Convert all arguments from 0-255 to 0-1.0.
"""
return [x/255.0 for x in vals]
def _clip(val, lo, hi):
""" Clip a val to staying between lo and hi.
"""
if val < lo:
val = lo
if val > hi:
val = hi
return val
class Palette:
    """ A palette is a list of colors for coloring the successive bands of the
    Mandelbrot set.
    colors is a list of RGB triples, 0-255, for display.
    fcolors is a list of HLS triples, 0.0-1.0, for computation.
    incolor is the RGB255 color for the interior of the set.
    _spec is a value that can be passed to from_spec to reconstitute the
    palette. It's returned by spec().
    """
    # Baseline display adjustments; spec() compares against these.
    default_adjusts = {'hue': 0, 'saturation': 0}

    def __init__(self):
        self.incolor = (0,0,0)
        # HSL colors, range 0-1.
        self.fcolors = [(0.0,0.0,0.0), (1.0,1.0,1.0)]
        # RGB colors, range 0-255.
        self.colors = []
        # RGB colors as one bytestring
        self._colorbytes = b""
        self._spec = []
        self.adjusts = dict(self.default_adjusts)
        self.wrap = True
        self._colors_from_fcolors()

    def __len__(self):
        return len(self.fcolors)

    def __eq__(self, other):
        # Palettes compare equal if they produce the same RGB colors.
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable — fine unless palettes are used as dict keys somewhere.
        return self.colors == other.colors

    def __ne__(self, other):
        return not self.__eq__(other)

    def _colors_from_fcolors(self):
        """ Set self.colors from self.fcolors, adjusting them for hue, etc,
        in the process.
        """
        self.colors = []
        hue_adj = self.adjusts['hue']/360.0
        sat_adj = self.adjusts['saturation']/255.0
        for h, l, s in self.fcolors:
            h = (h + hue_adj) % 1.0
            s = _clip(s + sat_adj, 0.0, 1.0)
            self.colors.append(_255(*colorsys.hls_to_rgb(h, l, s)))
        # Invalidate the cached byte string; color_bytes() will rebuild it.
        self._colorbytes = b""

    def color_bytes(self):
        """ Compute a string of RGB bytes for use in the engine.
        """
        if not self._colorbytes:
            colbytes = b"".join(bytes([r, g, b]) for r,g,b in self.colors)
            self._colorbytes = colbytes
        return self._colorbytes

    def spec(self):
        """ Create a textual description of the palette, for later reconstitution
        with from_spec().
        """
        s = self._spec[:]
        # Only record settings that differ from the defaults.
        if self.adjusts != self.default_adjusts:
            s.append(['adjust', self.adjusts])
        if self.incolor != (0,0,0):
            s.append(['rgb_incolor', {'color': self.incolor}])
        if not self.wrap:
            s.append(['wrapping', {'wrap': 0}])
        return s

    def rgb_colors(self, colors):
        """ Use an explicit list of RGB 0-255 colors as the palette.
        """
        self.colors = colors[:]
        self.fcolors = [colorsys.rgb_to_hls(*_1(*rgb255)) for rgb255 in self.colors]
        # NOTE(review): None here vs b"" elsewhere — both falsy, so
        # color_bytes() treats them the same.
        self._colorbytes = None
        self._spec.append(['rgb_colors', {'colors':colors}])
        return self

    def spectrum(self, ncolors, h=(0,360), l=(50,200), s=150):
        """ Fill the palette with `ncolors` colors sweeping the hue range `h`,
        alternating lightness and saturation between the low and high ends
        of `l` and `s`. Each argument can be a single number (constant) or
        a (low, high) pair.
        """
        if isinstance(h, (int, float)):
            h = (int(h), int(h))
        if isinstance(l, (int, float)):
            l = (int(l), int(l))
        if isinstance(s, (int, float)):
            s = (int(s), int(s))
        hlo, hhi = h
        llo, lhi = l
        slo, shi = s
        self.fcolors = []
        # Two colors per step: one at (llo, slo), one at (lhi, shi), with the
        # hue advanced half a step between them.
        for pt in range(ncolors//2):
            hfrac = (pt*1.0/(ncolors/2))
            hue = hlo + (hhi-hlo)*hfrac
            self.fcolors.append((hue/360.0, llo/255.0, slo/255.0))
            hfrac = (pt*1.0+0.5)/(ncolors/2)
            hue = hlo + (hhi-hlo)*hfrac
            self.fcolors.append((hue/360.0, lhi/255.0, shi/255.0))
        self._colors_from_fcolors()
        # Record only non-default arguments, collapsing constant ranges.
        args = {'ncolors':ncolors}
        if h != (0,360):
            if hlo == hhi:
                args['h'] = hlo
            else:
                args['h'] = h
        if l != (50,200):
            if llo == lhi:
                args['l'] = llo
            else:
                args['l'] = l
        if s != (150,150):
            if slo == shi:
                args['s'] = slo
            else:
                args['s'] = s
        self._spec.append(['spectrum', args])
        return self

    def stretch(self, steps, hsl=False, ease=None):
        """ Interpolate between colors in the palette, stretching it out.
        Works in either RGB or HSL space. `ease` may be "sine" for
        sinusoidal easing, or a number for a power ease-in-out curve.
        """
        fcolors = [None]*(len(self.fcolors)*steps)
        for i in range(len(fcolors)):
            color_index = i//steps
            a0, b0, c0 = self.fcolors[color_index]
            # Wrap around to the first color after the last one.
            a1, b1, c1 = self.fcolors[(color_index + 1) % len(self.fcolors)]
            if hsl:
                # Interpolating hue: take the short way around the circle.
                if a1 < a0 and a0-a1 > 0.01:
                    a1 += 1
            else:
                a0, b0, c0 = colorsys.hls_to_rgb(a0, b0, c0)
                a1, b1, c1 = colorsys.hls_to_rgb(a1, b1, c1)
            step = i % steps / steps
            if ease == "sine":
                step = -(math.cos(math.pi * step) - 1) / 2;
            elif isinstance(ease, (int, float)):
                # Power ease-in-out, symmetric about step == 0.5.
                if step < 0.5:
                    step = math.pow(2 * step, ease) / 2
                else:
                    step = 1 - math.pow(-2 * step + 2, ease) / 2
            ax, bx, cx = (
                a0 + (a1 - a0) * step,
                b0 + (b1 - b0) * step,
                c0 + (c1 - c0) * step,
            )
            if not hsl:
                # Store fcolors back in HLS form.
                ax, bx, cx = colorsys.rgb_to_hls(ax, bx, cx)
            fcolors[i] = (ax, bx, cx)
        self.fcolors = fcolors
        self._colors_from_fcolors()
        self._spec.append(['stretch', {'steps':steps, 'hsl':hsl, 'ease':ease}])
        return self

    def adjust(self, hue=0, saturation=0):
        """ Make adjustments to various aspects of the display of the palette.
        0 <= hue <= 360
        0 <= saturation <= 255
        """
        adj = self.adjusts
        adj['hue'] = (adj['hue'] + hue) % 360
        adj['saturation'] = _clip(adj['saturation'] + saturation, -255, 255)
        self._colors_from_fcolors()
        return self

    def reset(self):
        """ Reset all palette adjustments.
        """
        self.adjusts = {'hue': 0, 'saturation': 0}
        self._colors_from_fcolors()
        return self

    def rgb_incolor(self, color):
        """ Set the color for the interior of the Mandelbrot set.
        """
        self.incolor = color
        return self

    def wrapping(self, wrap):
        """ Set the wrap boolean on or off.
        """
        self.wrap = wrap
        return self

    def gradient(self, ggr_file, ncolors):
        """ Create the palette from a GIMP .ggr gradient file.
        """
        from aptus.ggr import GimpGradient
        ggr = GimpGradient()
        try:
            ggr.read(ggr_file)
            self.fcolors = [
                colorsys.rgb_to_hls(*ggr.color(float(c)/ncolors)) for c in range(ncolors)
            ]
        except IOError:
            # Unreadable gradient: fall back to a black-red-white palette.
            self.fcolors = [ (0.0,0.0,0.0), (1.0,0.0,0.0), (1.0,1.0,1.0) ]
        self._colors_from_fcolors()
        self._spec.append(['gradient', {'ggr_file':ggr_file, 'ncolors':ncolors}])
        return self

    def xaos(self):
        # Colors taken from Xaos, to get the same rendering.
        xaos_colors = [
            (0, 0, 0),
            (120, 119, 238),
            (24, 7, 25),
            (197, 66, 28),
            (29, 18, 11),
            (135, 46, 71),
            (24, 27, 13),
            (241, 230, 128),
            (17, 31, 24),
            (240, 162, 139),
            (11, 4, 30),
            (106, 87, 189),
            (29, 21, 14),
            (12, 140, 118),
            (10, 6, 29),
            (50, 144, 77),
            (22, 0, 24),
            (148, 188, 243),
            (4, 32, 7),
            (231, 146, 14),
            (10, 13, 20),
            (184, 147, 68),
            (13, 28, 3),
            (169, 248, 152),
            (4, 0, 34),
            (62, 83, 48),
            (7, 21, 22),
            (152, 97, 184),
            (8, 3, 12),
            (247, 92, 235),
            (31, 32, 16)
        ]
        self.rgb_colors(xaos_colors)
        # rgb_colors and stretch each record themselves in _spec; pop those
        # entries and record a single 'xaos' step instead.
        del self._spec[-1]
        self.stretch(8, hsl=False)
        del self._spec[-1]
        self._spec.append(['xaos', {}])
        return self

    def from_spec(self, spec):
        # Rebuild a palette by replaying a spec() list of [method, kwargs].
        for op, args in spec:
            getattr(self, op)(**args)
        return self
# The stock palettes offered to the user, in display order, each built with
# the fluent Palette construction methods.
all_palettes = [
    Palette().spectrum(12).stretch(10, hsl=True),
    Palette().spectrum(12).stretch(10, hsl=True, ease="sine"),
    Palette().spectrum(12, l=(50,150), s=150).stretch(25, hsl=True),
    Palette().spectrum(12, l=(50,150), s=150).stretch(25, hsl=True, ease="sine"),
    Palette().spectrum(64, l=125, s=175),
    Palette().spectrum(48, l=(100,150), s=175).stretch(5, hsl=False),
    Palette().spectrum(2, h=250, l=(100,150), s=175).stretch(10, hsl=True),
    Palette().spectrum(2, h=290, l=(75,175), s=(230,25)).stretch(10, hsl=True),
    Palette().spectrum(16, l=125, s=175),
    Palette().xaos(),
    Palette().spectrum(2, h=120, l=(50,200), s=125).stretch(128, hsl=True),
    Palette().rgb_colors([(0x00,0x28,0x68), (0xFF, 0xFF, 0xFF), (0xBF, 0x0A, 0x30), (0xFF, 0xFF, 0xFF)]).stretch(4, ease="sine"),
    Palette().rgb_colors([(255,255,255), (0,0,0), (0,0,0), (0,0,0)]),
    Palette().rgb_colors([(255,255,255)]),
]
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/palettes.py
|
palettes.py
|
__version__ = '1.0.20070915'
import colorsys
import math
class GimpGradient:
    """ Read and interpret a Gimp .ggr gradient file. """

    class _segment:
        # Plain record for one gradient segment; attributes set in read().
        pass

    def __init__(self, f=None):
        if f:
            self.read(f)

    def read(self, f):
        """ Read a .ggr file from f (either an open file or a file path). """
        if isinstance(f, str):
            f = open(f)
        if f.readline().strip() != "GIMP Gradient":
            raise IOError("Not a GIMP gradient file")
        line = f.readline().strip()
        if not line.startswith("Name: "):
            raise IOError("Not a GIMP gradient file")
        self.name = line.split(": ", 1)[1]
        nsegs = int(f.readline().strip())
        self.segs = []
        for _ in range(nsegs):
            fields = [float(v) for v in f.readline().strip().split()]
            seg = self._segment()
            (seg.l, seg.m, seg.r,
             seg.rl, seg.gl, seg.bl, _alpha_l,
             seg.rr, seg.gr, seg.br, _alpha_r,
             seg.fn, seg.space) = fields
            self.segs.append(seg)

    def color(self, x):
        """ Get the color for the point x in the range [0..1).
        The color is returned as an rgb triple, with all values in the range
        [0..1).
        """
        # Find the segment containing x.
        seg = next((s for s in self.segs if s.l <= x <= s.r), None)
        if seg is None:
            # No segment applies! Return black I guess.
            return (0, 0, 0)
        # Normalize the segment geometry.
        width = seg.r - seg.l
        mid = (seg.m - seg.l) / width
        pos = (x - seg.l) / width
        # Assume linear (most common, and needed by most others).
        if pos <= mid:
            f = pos / mid / 2
        else:
            f = (pos - mid) / (1 - mid) / 2 + 0.5
        # Apply the segment's blending function.
        if seg.fn == 1:     # Curved
            f = math.pow(pos, math.log(0.5) / math.log(mid))
        elif seg.fn == 2:   # Sinusoidal
            f = (math.sin((-math.pi / 2) + math.pi * f) + 1) / 2
        elif seg.fn == 3:   # Spherical increasing
            f -= 1
            f = math.sqrt(1 - f * f)
        elif seg.fn == 4:   # Spherical decreasing
            f = 1 - math.sqrt(1 - f * f)
        # Interpolate the colors in the segment's color space.
        if seg.space == 0:          # RGB
            c = (
                seg.rl + (seg.rr - seg.rl) * f,
                seg.gl + (seg.gr - seg.gl) * f,
                seg.bl + (seg.br - seg.bl) * f
            )
        elif seg.space in (1, 2):   # HSV, counter-/clockwise hue
            hl, sl, vl = colorsys.rgb_to_hsv(seg.rl, seg.gl, seg.bl)
            hr, sr, vr = colorsys.rgb_to_hsv(seg.rr, seg.gr, seg.br)
            if seg.space == 1 and hr < hl:
                hr += 1
            elif seg.space == 2 and hr > hl:
                hr -= 1
            c = colorsys.hsv_to_rgb(
                (hl + (hr - hl) * f) % 1.0,
                sl + (sr - sl) * f,
                vl + (vr - vl) * f
            )
        return c
def test_it():
    """ Ad-hoc viewer: show the .ggr file named on the command line as a strip
    of colors, optionally also quantized into sys.argv[2] chunks.
    """
    import sys, wx
    class GgrView(wx.Frame):
        def __init__(self, ggr, chunks):
            """ Display the ggr file as a strip of colors.
            If chunks is non-zero, then also display the gradient quantized
            into that many chunks.
            """
            super(GgrView, self).__init__(None, -1, 'Ggr: %s' % ggr.name)
            self.ggr = ggr
            self.chunks = chunks
            self.SetSize((600, 100))
            self.panel = wx.Panel(self)
            self.panel.Bind(wx.EVT_PAINT, self.on_paint)
            self.panel.Bind(wx.EVT_SIZE, self.on_size)
        def on_paint(self, event_unused):
            dc = wx.PaintDC(self.panel)
            cw_unused, ch = self.GetClientSize()
            if self.chunks:
                # Integer halves: wxPython 4 DC methods require int coordinates.
                self.paint_some(dc, 0, 0, ch // 2)
                self.paint_some(dc, self.chunks, ch // 2, ch)
            else:
                self.paint_some(dc, 0, 0, ch)
        def paint_some(self, dc, chunks, y0, y1):
            """ Paint the gradient between rows y0 and y1, quantized into
            `chunks` pieces (0 means continuous).
            """
            cw, ch_unused = self.GetClientSize()
            chunkw = 1
            if chunks:
                chunkw = (cw // chunks) or 1
            for x in range(0, cw, chunkw):
                # Use self.ggr: the old code reached for the enclosing
                # function's `ggr` local, which was fragile.
                c = [int(255*comp) for comp in self.ggr.color(float(x)/cw)]
                dc.SetPen(wx.Pen(wx.Colour(*c), 1))
                dc.SetBrush(wx.Brush(wx.Colour(*c), wx.SOLID))
                dc.DrawRectangle(x, y0, chunkw, y1-y0)
        def on_size(self, event_unused):
            self.Refresh()
    # wx.PySimpleApp was removed in wxPython 4 (Phoenix); wx.App(False) is the
    # equivalent that works in both generations.
    app = wx.App(False)
    ggr = GimpGradient(sys.argv[1])
    chunks = 0
    if len(sys.argv) > 2:
        chunks = int(sys.argv[2])
    f = GgrView(ggr, chunks)
    f.Show()
    app.MainLoop()
if __name__ == '__main__':
    # Run the wx-based gradient viewer when invoked as a script.
    test_it()
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/ggr.py
|
ggr.py
|
import json
import optparse
import re
from PIL import Image
from aptus.palettes import Palette
# One-paragraph description shown in --help output; newlines collapsed to
# spaces so optparse can re-wrap it to the terminal width.
description = """\
Aptus renders Mandelbrot set images. Three flavors are available:
aptusweb and aptusgui for interactive exploration, and aptuscmd for
high-quality rendering.
""".replace('\n', ' ')

class AptusOptions:
    """ An option parser for Aptus states.
    """
    def __init__(self, target):
        """ Create an AptusOptions parser. Attributes are set on the target, which
        should be an AptusCompute-like thing.
        """
        self.target = target

    def _create_parser(self):
        """ Build the optparse parser for the Aptus command line. """
        parser = optparse.OptionParser(
            usage="%prog [options] [parameterfile]",
            description=description
        )
        parser.add_option("-a", "--angle", dest="angle", help="set the angle of rotation")
        parser.add_option("--center", dest="center", help="set the center of the view", metavar="RE,IM")
        parser.add_option("-c", "--continuous", dest="continuous", help="use continuous coloring", action="store_true")
        parser.add_option("--diam", dest="diam", help="set the diameter of the view")
        parser.add_option("-i", "--iterlimit", dest="iter_limit", help="set the limit on the iteration count")
        parser.add_option("-o", "--output", dest="outfile", help="set the output filename (aptuscmd only)")
        parser.add_option("--phase", dest="palette_phase", help="set the palette phase", metavar="PHASE")
        parser.add_option("--pscale", dest="palette_scale", help="set the palette scale", metavar="SCALE")
        parser.add_option("-s", "--size", dest="size", help="set the pixel size of the image", metavar="WIDxHGT")
        parser.add_option("--super", dest="supersample",
            help="set the supersample rate (aptuscmd only)", metavar="S")
        return parser

    def _pair(self, s, cast):
        """ Convert a string argument to a pair of casted values.
        The separator can be ',' or 'x'; a single value is doubled.
        """
        vals = list(map(cast, re.split("[,x]", s)))
        if len(vals) == 1:
            vals = vals*2
        return vals

    def _int_pair(self, s):
        """ Convert a string argument to a pair of ints.
        """
        return self._pair(s, int)

    def _float_pair(self, s):
        """ Convert a string argument to a pair of floats.
        """
        return self._pair(s, float)

    def read_args(self, argv):
        """ Read aptus options from the provided argv, setting the parsed
        values as attributes on self.target.
        """
        parser = self._create_parser()
        options, args = parser.parse_args(argv)
        if args:
            self.opts_from_file(args[0])
        # Compare against None, not truthiness: legitimate zero values like
        # "--angle 0" or "--phase 0" used to be silently ignored.
        if options.angle is not None:
            self.target.angle = float(options.angle)
        if options.center is not None:
            self.target.center = self._float_pair(options.center)
        if options.continuous is not None:
            self.target.continuous = options.continuous
        if options.diam is not None:
            self.target.diam = self._float_pair(options.diam)
        if options.iter_limit is not None:
            self.target.iter_limit = int(options.iter_limit)
        if options.outfile is not None:
            self.target.outfile = options.outfile
        if options.palette_phase is not None:
            self.target.palette_phase = int(options.palette_phase)
        if options.palette_scale is not None:
            self.target.palette_scale = float(options.palette_scale)
        if options.size is not None:
            self.target.size = self._int_pair(options.size)
        if options.supersample is not None:
            self.target.supersample = int(options.supersample)

    def options_help(self):
        """ Return the help text about the command line options.
        """
        parser = self._create_parser()
        return parser.format_help()

    def opts_from_file(self, fname):
        """ Read aptus options from the given filename. Various forms of input
        file are supported: .aptus state files, Xaos .xpf files, and .png
        images written by Aptus (which embed their state).
        """
        if fname.endswith('.aptus'):
            aptst = AptusState(self.target)
            aptst.read(fname)
        elif fname.endswith('.xpf'):
            xaos = XaosState()
            xaos.read(fname)
            self.target.center = xaos.center
            self.target.diam = xaos.diam
            self.target.angle = xaos.angle
            self.target.iter_limit = xaos.maxiter
            self.target.palette = xaos.palette
            self.target.palette_phase = xaos.palette_phase
        elif fname.endswith('.png'):
            im = Image.open(fname)
            if "Aptus State" in im.info:
                aptst = AptusState(self.target)
                aptst.read_string(im.info["Aptus State"])
            else:
                raise Exception("PNG file has no Aptus state information: %s" % fname)
        else:
            raise Exception("Don't know how to read options from %s" % fname)
class AptusStateError(Exception):
    """ Raised for problems with Aptus state serialization. """
    pass
class AptusState:
    """ A serialization class for the state of an Aptus rendering.
    The result is a JSON representation.
    """
    def __init__(self, target):
        """ `target` is an AptusCompute-like thing whose attributes are
        read by write*() and set by read*().
        """
        self.target = target

    def write(self, f):
        """ Write the state to `f`, a writable file object or a file path.
        A path we open ourselves is properly closed (the old code leaked it).
        """
        if isinstance(f, str):
            with open(f, "w") as fobj:
                fobj.write(self.write_string())
        else:
            f.write(self.write_string())

    # Attributes copied verbatim between the target and the JSON dict.
    simple_attrs = "center diam angle iter_limit palette_phase palette_scale supersample continuous mode".split()
    # Attributes only meaningful in julia mode.
    julia_attrs = "rijulia".split()

    def write_attrs(self, d, attrs):
        """ Copy the named target attributes into dict `d`. """
        for a in attrs:
            d[a] = getattr(self.target, a)

    def write_string(self):
        """ Return the target's state serialized as a JSON string. """
        d = {'Aptus State':1}
        self.write_attrs(d, self.simple_attrs)
        d['size'] = list(self.target.size)
        d['palette'] = self.target.palette.spec()
        if self.target.mode == 'julia':
            self.write_attrs(d, self.julia_attrs)
        return json.dumps(d)

    def read_attrs(self, d, attrs):
        """ Copy the named keys from dict `d` onto the target, if present. """
        for a in attrs:
            if a in d:
                setattr(self.target, a, d[a])

    def read(self, f):
        """ Read state from `f`, a readable file object or a file path. """
        if isinstance(f, str):
            with open(f, 'r') as fobj:
                return self.read_string(fobj.read())
        return self.read_string(f.read())

    def read_string(self, s):
        """ Restore the target's state from a JSON string. """
        d = json.loads(s)
        self.read_attrs(d, self.simple_attrs)
        self.target.palette = Palette().from_spec(d['palette'])
        self.target.size = d['size']
        self.read_attrs(d, self.julia_attrs)
class XaosState:
    """ The state of a Xaos rendering, read from an .xpf file.
    Attributes mirror their Aptus equivalents: maxiter, center, diam,
    angle, palette, palette_phase.
    """
    def __init__(self):
        self.maxiter = 170
        self.center = -0.75, 0.0
        self.diam = 2.55, 2.55
        self.angle = 0.0
        self.palette_phase = 0
        self.palette = Palette().xaos()

    def read(self, f):
        """ Read an .xpf file `f` (file object or path), dispatching each
        "(command args...)" line to a handle_* method if we have one.
        A path we open ourselves is properly closed.
        """
        if isinstance(f, str):
            with open(f) as fobj:
                self._read_lines(fobj)
        else:
            self._read_lines(f)

    def _read_lines(self, f):
        # Each interesting line looks like "(command arg arg...)".
        for l in f:
            if l.startswith('('):
                argv = l[1:-2].split()
                if hasattr(self, 'handle_'+argv[0]):
                    getattr(self, 'handle_'+argv[0])(*argv)

    def handle_maxiter(self, op_unused, maxiter):
        self.maxiter = int(maxiter)

    def handle_view(self, op_unused, cr, ci, rr, ri):
        # Xaos writes i coordinates inverted.
        self.center = self.read_float(cr), -self.read_float(ci)
        self.diam = self.read_float(rr), self.read_float(ri)

    def handle_shiftpalette(self, op_unused, phase):
        self.palette_phase = int(phase)

    def handle_angle(self, op_unused, angle):
        self.angle = self.read_float(angle)

    def read_float(self, fstr):
        """ Parse a float the Xaos way.
        Xaos writes out floats with extra characters tacked on the end
        sometimes, so chop trailing chars until the rest parses.
        Raises ValueError if no prefix parses as a float (the old code
        would loop forever in that case).
        """
        original = fstr
        while fstr:
            try:
                return float(fstr)
            except ValueError:
                fstr = fstr[:-1]
        raise ValueError("Couldn't read a float from %r" % (original,))
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/options.py
|
options.py
|
import wx
from aptus.gui.computepanel import MiniComputePanel
from aptus.gui.ids import *
from aptus.gui.misc import AptusToolFrame, ListeningWindowMixin
class JuliaPanel(MiniComputePanel, ListeningWindowMixin):
    """ A panel displaying the Julia set for the current point in another window.
    """
    def __init__(self, parent, viewwin, size=wx.DefaultSize):
        """ Create a JuliaPanel, with `parent` as its parent, and `viewwin` as
            the window to track.
        """
        MiniComputePanel.__init__(self, parent, size=size)
        ListeningWindowMixin.__init__(self)
        self.viewwin = viewwin
        # Track the main view: recolor when its coloring changes, redraw the
        # Julia set when it indicates a new point.
        self.register_listener(self.on_coloring_changed, EVT_APTUS_COLORING_CHANGED, self.viewwin)
        self.register_listener(self.draw_julia, EVT_APTUS_INDICATEPOINT, self.viewwin)
        # Fixed whole-set Julia view, centered on the origin.
        self.compute.center, self.compute.diam = (0.0,0.0), (3.0,3.0)
        self.compute.mode = 'julia'
        self.on_coloring_changed(None)
        # Need to call update_info after the window appears, so that the widths of
        # the text controls can be set properly. Else, it all appears left-aligned.
        wx.CallAfter(self.draw_julia)

    def draw_julia(self, event=None):
        """ Recompute the Julia set for the point indicated by `event`, or for
            the mouse position in the tracked view window.
        """
        # Different events will trigger this, be flexible about how to get the
        # mouse position.
        if event and hasattr(event, 'point'):
            pt = event.point
        else:
            pt = self.viewwin.ScreenToClient(wx.GetMousePosition())
        pt_info = self.viewwin.get_point_info(pt)
        if pt_info:
            # Use the point's complex coordinates as the Julia parameter.
            self.compute.rijulia = pt_info['r'], pt_info['i']
            self.compute.iter_limit = 1000
        else:
            self.compute.rijulia = 0,0
        self.compute.create_mandel()
        self.computation_changed()
        # Need to let the main window handle the event too.
        if event:
            event.Skip()

    def on_coloring_changed(self, event_unused):
        # Copy the main view's coloring; only redraw if something changed.
        if self.compute.copy_coloring(self.viewwin.compute):
            self.coloring_changed()
class JuliaFrame(AptusToolFrame):
    """ The tool frame hosting a JuliaPanel that tracks `viewwin`. """
    def __init__(self, mainframe, viewwin):
        AptusToolFrame.__init__(self, mainframe, title='Julia Set', size=(180,180))
        self.panel = JuliaPanel(self, viewwin)
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/gui/juliapanel.py
|
juliapanel.py
|
import wx
from aptus import settings
from aptus.gui.computepanel import ComputePanel
from aptus.gui.ids import *
from aptus.palettes import all_palettes
from aptus.progress import ConsoleProgressReporter, IntervalProgressReporter
# A pre-set list of places to visit, with the j command.
# Each entry is a (center, diam) pair of complex-plane coordinates.
JUMPS = [
    (settings.center(), settings.diam()),
    ((-1.8605294939875601,-1.0475516319329809e-005), (2.288818359375e-005,2.288818359375e-005)),
    ((-1.8605327731370924,-1.2700557708795141e-005), (1.7881393432617188e-007,1.7881393432617188e-007)),
    ((0.45687170535326038,0.34780396997928614), (0.005859375,0.005859375)),
]
class AptusViewPanel(ComputePanel):
    """ A panel implementing the primary Aptus view and controller.
    """
    def __init__(self, parent):
        ComputePanel.__init__(self, parent)
        self.compute.quiet = False

        # Bind input events.
        self.Bind(wx.EVT_LEFT_DOWN, self.on_left_down)
        self.Bind(wx.EVT_MIDDLE_DOWN, self.on_middle_down)
        self.Bind(wx.EVT_MOTION, self.on_motion)
        self.Bind(wx.EVT_LEFT_UP, self.on_left_up)
        self.Bind(wx.EVT_MIDDLE_UP, self.on_middle_up)
        self.Bind(wx.EVT_RIGHT_UP, self.on_right_up)
        self.Bind(wx.EVT_LEAVE_WINDOW, self.on_leave_window)
        self.Bind(wx.EVT_KEY_DOWN, self.on_key_down)
        self.Bind(wx.EVT_KEY_UP, self.on_key_up)
        self.Bind(wx.EVT_KILL_FOCUS, self.on_kill_focus)
        self.Bind(wx.EVT_SET_FOCUS, self.on_set_focus)

        # Bind commands fired by menus, keystrokes, and tool windows.
        self.Bind(wx.EVT_MENU, self.cmd_set_angle, id=id_set_angle)
        self.Bind(wx.EVT_MENU, self.cmd_set_iter_limit, id=id_set_iter_limit)
        self.Bind(wx.EVT_MENU, self.cmd_toggle_continuous, id=id_toggle_continuous)
        self.Bind(wx.EVT_MENU, self.cmd_jump, id=id_jump)
        self.Bind(wx.EVT_MENU, self.cmd_redraw, id=id_redraw)
        self.Bind(wx.EVT_MENU, self.cmd_change_palette, id=id_change_palette)
        self.Bind(wx.EVT_MENU, self.cmd_set_palette, id=id_set_palette)
        self.Bind(wx.EVT_MENU, self.cmd_cycle_palette, id=id_cycle_palette)
        self.Bind(wx.EVT_MENU, self.cmd_scale_palette, id=id_scale_palette)
        self.Bind(wx.EVT_MENU, self.cmd_adjust_palette, id=id_adjust_palette)
        self.Bind(wx.EVT_MENU, self.cmd_reset_palette, id=id_reset_palette)

        self.reset_mousing()

        # Gui state values
        self.palette_index = 0      # The index of the currently displayed palette
        self.jump_index = 0         # The index of the last jumped-to spot.
        self.zoom = 2.0             # A constant zoom amt per click.

    # Input methods

    def reset_mousing(self):
        """ Set all the mousing variables to turn off rubberbanding and panning.
        """
        self.pt_down = None         # Pixel where a button went down, or None.
        self.rubberbanding = False
        self.rubberrect = None      # Last-drawn rubber-band rect, if any.
        # Panning information.
        self.panning = False
        self.pt_pan = None
        self.pan_locked = False
        # When shift is down, then we're indicating points.
        self.indicating_pt = False
        self.indicated_pt = (-1, -1)

    def finish_panning(self, mx, my):
        """ Recenter the view by the distance panned since the button went down. """
        if not self.pt_down:
            return
        cx, cy = self.compute.size[0]/2.0, self.compute.size[1]/2.0
        cx -= mx - self.pt_down[0]
        cy -= my - self.pt_down[1]
        self.compute.center = self.compute.coords_from_pixel(cx, cy)
        self.geometry_changed()

    def xor_rectangle(self, rect):
        """ Draw (or erase, since it's XOR) a rubber-band rectangle outline. """
        dc = wx.ClientDC(self)
        dc.SetLogicalFunction(wx.XOR)
        dc.SetBrush(wx.Brush(wx.WHITE, wx.TRANSPARENT))
        dc.SetPen(wx.Pen(wx.WHITE, 1, wx.SOLID))
        dc.DrawRectangle(*rect)

    def set_cursor(self, event_unused):
        """ Choose a cursor reflecting the current mouse mode. """
        # If we aren't taking input, then we shouldn't change the cursor.
        if not self.GetTopLevelParent().IsActive():
            return
        # Set the proper cursor:
        if self.rubberbanding:
            self.SetCursor(wx.Cursor(wx.CURSOR_MAGNIFIER))
        elif self.panning:
            self.SetCursor(wx.Cursor(wx.CURSOR_SIZING))
        elif self.indicating_pt:
            # Use a custom crosshair with the hotspot at its center.
            import aptus.gui.resources
            curimg = aptus.gui.resources.getCrosshairImage()
            curimg.SetOption(wx.IMAGE_OPTION_CUR_HOTSPOT_X, 7)
            curimg.SetOption(wx.IMAGE_OPTION_CUR_HOTSPOT_Y, 7)
            cur = wx.Cursor(curimg)
            self.SetCursor(cur)
            #self.SetCursor(wx.Cursor(wx.CURSOR_CROSS))
        else:
            self.SetCursor(wx.Cursor(wx.CURSOR_DEFAULT))

    def indicate_point(self, event):
        """ Use the given event to indicate a point, maybe.
        """
        # Shift turns on point indication; the event may or may not carry
        # modifier/position info, so fall back to the global mouse state.
        if hasattr(event, 'ShiftDown'):
            self.indicating_pt = event.ShiftDown()
        else:
            self.indicating_pt = wx.GetMouseState().shiftDown
        if self.indicating_pt:
            if hasattr(event, 'GetPosition'):
                pt = event.GetPosition()
            else:
                ms = wx.GetMouseState()
                pt = self.ScreenToClient((ms.x, ms.y))
            # Only fire when the point is inside the window and has moved.
            if self.GetRect().Contains(pt) and pt != self.indicated_pt:
                self.indicated_pt = pt
                self.fire_event(AptusIndicatePointEvent, point=pt)

    def dilate_view(self, center, scale):
        """ Change the view by a certain scale factor, keeping the center in the
            same spot.
        """
        cx = center[0] + (self.compute.size[0]/2 - center[0]) * scale
        cy = center[1] + (self.compute.size[1]/2 - center[1]) * scale
        self.compute.center = self.compute.coords_from_pixel(cx, cy)
        self.compute.diam = (self.compute.diam[0]*scale, self.compute.diam[1]*scale)
        self.geometry_changed()

    def make_progress_reporter(self):
        # Construct a progress reporter that suits us.  Write to the console,
        # but only once a second.
        return IntervalProgressReporter(1, ConsoleProgressReporter())

    # Event handlers

    def on_idle(self, event):
        self.indicate_point(event)
        self.set_cursor(event)
        ComputePanel.on_idle(self, event)

    def on_paint(self, event_unused):
        if not self.bitmap:
            self.bitmap = self.draw_bitmap()
        dc = wx.AutoBufferedPaintDC(self)
        if self.panning:
            # While panning, fill the background and draw the bitmap offset by
            # the drag distance.
            dc.SetBrush(wx.Brush(wx.Colour(224,224,128), wx.SOLID))
            dc.SetPen(wx.Pen(wx.Colour(224,224,128), 1, wx.SOLID))
            dc.DrawRectangle(0, 0, self.compute.size[0], self.compute.size[1])
            dc.DrawBitmap(self.bitmap, self.pt_pan[0]-self.pt_down[0], self.pt_pan[1]-self.pt_down[1], False)
        else:
            dc.DrawBitmap(self.bitmap, 0, 0, False)

    def on_left_down(self, event):
        #print(wx.Window.FindFocus())
        self.pt_down = event.GetPosition()
        self.rubberbanding = False
        if self.panning:
            self.pt_pan = self.pt_down
            self.pan_locked = False

    def on_middle_down(self, event):
        # Middle button always pans.
        self.pt_down = event.GetPosition()
        self.rubberbanding = False
        self.panning = True
        self.pt_pan = self.pt_down
        self.pan_locked = False

    def on_motion(self, event):
        self.indicate_point(event)
        self.set_cursor(event)
        # We do nothing with mouse moves that aren't dragging.
        if not self.pt_down:
            return
        mx, my = event.GetPosition()
        if self.panning:
            if self.pt_pan != (mx, my):
                # We've moved the image: redraw it.
                self.pt_pan = (mx, my)
                self.pan_locked = True
                self.Refresh()
        else:
            if not self.rubberbanding:
                # Start rubberbanding when we have a 10-pixel rectangle at least.
                if abs(self.pt_down[0] - mx) > 10 or abs(self.pt_down[1] - my) > 10:
                    self.rubberbanding = True
            if self.rubberbanding:
                if self.rubberrect:
                    # Erase the old rectangle.
                    self.xor_rectangle(self.rubberrect)
                self.rubberrect = (self.pt_down[0], self.pt_down[1], mx-self.pt_down[0], my-self.pt_down[1])
                self.xor_rectangle(self.rubberrect)

    def on_left_up(self, event):
        mx, my = event.GetPosition()
        if self.rubberbanding:
            # Set a new view that encloses the rectangle.
            px, py = self.pt_down
            ulr, uli = self.compute.coords_from_pixel(px, py)
            lrr, lri = self.compute.coords_from_pixel(mx, my)
            self.set_geometry(corners=(ulr, uli, lrr, lri))
        elif self.panning:
            self.finish_panning(mx, my)
        elif self.pt_down:
            # Single-click: zoom in.  Cmd makes the zoom step much finer.
            scale = self.zoom
            if event.CmdDown():
                scale = (scale - 1.0)/10 + 1.0
            self.dilate_view((mx, my), 1.0/scale)
        self.reset_mousing()

    def on_middle_up(self, event):
        self.finish_panning(*event.GetPosition())
        self.reset_mousing()

    def on_right_up(self, event):
        # Right-click: zoom out (Cmd makes the step finer).
        scale = self.zoom
        if event.CmdDown():
            scale = (scale - 1.0)/10 + 1.0
        self.dilate_view(event.GetPosition(), scale)
        self.reset_mousing()

    def on_leave_window(self, event):
        # Cancel any in-progress rubber-band or pan when the mouse leaves.
        if self.rubberrect:
            self.xor_rectangle(self.rubberrect)
        if self.panning:
            self.finish_panning(*event.GetPosition())
        self.reset_mousing()

    def on_key_down(self, event):
        # Turn keystrokes into commands.
        shift = event.ShiftDown()
        cmd = event.CmdDown()
        keycode = event.KeyCode
        #print("Look:", keycode)
        if keycode == ord('A'):
            self.fire_command(id_set_angle)
        elif keycode == ord('C'):
            self.fire_command(id_toggle_continuous)
        elif keycode == ord('F'):
            self.fire_command(id_fullscreen)
        elif keycode == ord('H'):
            self.fire_command(id_help)
        elif keycode == ord('I'):
            self.fire_command(id_set_iter_limit)
        elif keycode == ord('J'):
            if shift:
                self.fire_command(id_show_julia)
            else:
                self.fire_command(id_jump)
        elif keycode == ord('L'):
            self.fire_command(id_show_youarehere)
        elif keycode == ord('N'):
            self.fire_command(id_new)
        elif keycode == ord('O'):
            self.fire_command(id_open)
        elif keycode == ord('P'):
            self.fire_command(id_show_palettes)
        elif keycode == ord('Q'):
            self.fire_command(id_show_pointinfo)
        elif keycode == ord('R'):
            self.fire_command(id_redraw)
        elif keycode == ord('S'):
            self.fire_command(id_save)
        elif keycode == ord('V'):
            self.fire_command(id_show_stats)
        elif keycode == ord('W'):
            self.fire_command(id_window_size)
        elif keycode == ord('0'): # zero
            self.fire_command(id_reset_palette)
        elif keycode in [ord(','), ord('<')]:
            if shift:
                self.fire_command(id_change_palette, -1)
            else:
                self.fire_command(id_cycle_palette, -1)
        elif keycode in [ord('.'), ord('>')]:
            if shift:
                self.fire_command(id_change_palette, 1)
            else:
                self.fire_command(id_cycle_palette, 1)
        elif keycode == ord(';'):
            # Scale the palette down; Cmd makes the step finer.
            self.fire_command(id_scale_palette, 1/(1.01 if cmd else 1.1))
        elif keycode == ord("'"):
            # Scale the palette up; Cmd makes the step finer.
            self.fire_command(id_scale_palette, 1.01 if cmd else 1.1)
        elif keycode in [ord('['), ord(']')]:
            # Adjust hue (or saturation with shift); Cmd makes the step finer.
            kw = 'hue'
            delta = 1 if cmd else 10
            if keycode == ord('['):
                delta = -delta
            if shift:
                kw = 'saturation'
            self.fire_command(id_adjust_palette, {kw:delta})
        elif keycode == ord(' '):
            # Holding space enables panning.
            self.panning = True
        elif keycode == ord('/') and shift:
            self.fire_command(id_help)
        elif 0:
            # Debugging aid: find the symbol for the key we didn't handle.
            revmap = dict([(getattr(wx,n), n) for n in dir(wx) if n.startswith('WXK')])
            sym = revmap.get(keycode, "")
            if not sym:
                sym = "ord(%r)" % chr(keycode)
            #print("Unmapped key: %r, %s, shift=%r, cmd=%r" % (keycode, sym, shift, cmd))

    def on_key_up(self, event):
        keycode = event.KeyCode
        if keycode == ord(' '):
            # Releasing space ends panning, unless a drag is in progress.
            if not self.pan_locked:
                self.panning = False

    def on_set_focus(self, event):
        pass #print("Set focus")

    def on_kill_focus(self, event):
        # Early return: the debugging trace below is intentionally disabled.
        return
        import traceback; traceback.print_stack()
        print("Kill focus to %r" % event.GetWindow())
        print("Parent: %r" % self.GetParent())
        if self.GetParent():
            print("Isactive: %r" % self.GetParent().IsActive())

    # Command helpers

    def set_value(self, dtitle, dprompt, attr, caster, when_done):
        """ Prompt for a new value for compute attribute `attr`, convert it
            with `caster`, and call `when_done` on success.
        """
        cur_val = getattr(self.compute, attr)
        dlg = wx.TextEntryDialog(self.GetTopLevelParent(), dtitle, dprompt, str(cur_val))
        if dlg.ShowModal() == wx.ID_OK:
            try:
                setattr(self.compute, attr, caster(dlg.GetValue()))
                when_done()
            except ValueError as e:
                self.message("Couldn't set %s: %s" % (attr, e))
        dlg.Destroy()

    def palette_changed(self):
        """ Use the self.palette_index to set a new palette.
        """
        self.compute.palette = all_palettes[self.palette_index]
        self.compute.palette_phase = 0
        self.compute.palette_scale = 1.0
        self.coloring_changed()

    # Commands

    def cmd_set_angle(self, event_unused):
        self.set_value('Angle:', 'Set the angle of rotation', 'angle', float, self.geometry_changed)

    def cmd_set_iter_limit(self, event_unused):
        self.set_value('Iteration limit:', 'Set the iteration limit', 'iter_limit', int, self.computation_changed)

    def cmd_toggle_continuous(self, event_unused):
        self.compute.continuous = not self.compute.continuous
        self.computation_changed()

    def cmd_redraw(self, event_unused):
        # Throw away cached results and recompute from scratch.
        self.compute.clear_results()
        self.set_view()

    def cmd_jump(self, event_unused):
        # Cycle to the next pre-set place in JUMPS.
        self.jump_index += 1
        self.jump_index %= len(JUMPS)
        self.compute.center, self.compute.diam = JUMPS[self.jump_index]
        self.geometry_changed()

    def cmd_cycle_palette(self, event):
        # Shift the palette phase by the delta in the event's client data.
        delta = event.GetClientData()
        self.compute.palette_phase += delta
        self.coloring_changed()

    def cmd_scale_palette(self, event):
        # Palette scaling only applies in continuous-coloring mode.
        factor = event.GetClientData()
        if self.compute.continuous:
            self.compute.palette_scale *= factor
            self.coloring_changed()

    def cmd_change_palette(self, event):
        # Step to another palette in the list, wrapping around.
        delta = event.GetClientData()
        self.palette_index += delta
        self.palette_index %= len(all_palettes)
        self.palette_changed()

    def cmd_set_palette(self, event):
        self.palette_index = event.GetClientData()
        self.palette_changed()

    def cmd_adjust_palette(self, event):
        # Client data is a kwargs dict for Palette.adjust, e.g. {'hue': 10}.
        self.compute.palette.adjust(**event.GetClientData())
        self.coloring_changed()

    def cmd_reset_palette(self, event_unused):
        self.compute.palette_phase = 0
        self.compute.palette_scale = 1.0
        self.compute.palette.reset()
        self.coloring_changed()
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/gui/viewpanel.py
|
viewpanel.py
|
import wx
from wx.lib.scrolledpanel import ScrolledPanel
from aptus.gui.ids import *
from aptus.gui.misc import AptusToolFrame, ListeningWindowMixin
class PaletteWin(wx.Window):
    """ A window for displaying a single palette. Handles click events to
        change the palette in the view window.
    """
    def __init__(self, parent, palette, ipal, viewwin, size=wx.DefaultSize):
        wx.Window.__init__(self, parent, size=size)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.palette = palette
        self.ipal = ipal            # Index of this palette in the full list.
        self.viewwin = viewwin
        self.selected = False       # Highlighted as the current palette?
        self.Bind(wx.EVT_PAINT, self.on_paint)
        self.Bind(wx.EVT_SIZE, self.on_size)
        self.Bind(wx.EVT_LEFT_UP, self.on_left_up)

    def on_paint(self, event_unused):
        """ Paint the palette as a strip of color swatches with an outline. """
        # Geometry: client size and margin widths.
        cw, ch = self.GetClientSize()
        mt, mr, mb, ml = 3, 6, 3, 6
        dc = wx.AutoBufferedPaintDC(self)
        # Paint the background.
        if self.selected:
            color = wx.Colour(128, 128, 128)
        else:
            color = wx.Colour(255, 255, 255)
        dc.SetPen(wx.TRANSPARENT_PEN)
        dc.SetBrush(wx.Brush(color, wx.SOLID))
        dc.DrawRectangle(0, 0, cw, ch)
        # Paint the palette
        ncolors = len(self.palette.colors)
        width = float(cw-mr-ml-2)/ncolors
        for c in range(0, ncolors):
            dc.SetPen(wx.TRANSPARENT_PEN)
            dc.SetBrush(wx.Brush(wx.Colour(*self.palette.colors[c]), wx.SOLID))
            dc.DrawRectangle(int(c*width)+ml+1, mt+1, int(width+1), ch-mt-mb-2)
        # Paint the black outline
        dc.SetPen(wx.BLACK_PEN)
        dc.SetBrush(wx.TRANSPARENT_BRUSH)
        dc.DrawRectangle(ml, mt, cw-ml-mr, ch-mt-mb)

    def on_size(self, event_unused):
        # Since the painting changes everywhere when the width changes, refresh
        # on size changes.
        self.Refresh()

    def on_left_up(self, event_unused):
        # Left click: tell the view window to switch to my palette.
        self.viewwin.fire_command(id_set_palette, self.ipal)
class PalettesPanel(ScrolledPanel, ListeningWindowMixin):
    """ A panel displaying a number of palettes.
    """
    def __init__(self, parent, palettes, viewwin, size=wx.DefaultSize):
        ScrolledPanel.__init__(self, parent, size=size)
        ListeningWindowMixin.__init__(self)
        self.viewwin = viewwin
        self.palettes = palettes
        self.pal_height = 30        # Pixel height of each palette strip.
        self.selected = -1          # Index of the selected palette; -1 = none yet.
        self.palwins = []
        # One PaletteWin per palette, stacked in a single growable column.
        self.sizer = wx.FlexGridSizer(rows=len(self.palettes), cols=1, vgap=0, hgap=0)
        for i, pal in enumerate(self.palettes):
            palwin = PaletteWin(self, pal, i, viewwin, size=(200, self.pal_height))
            self.sizer.Add(palwin, flag=wx.EXPAND)
            self.palwins.append(palwin)
        self.sizer.AddGrowableCol(0)
        self.sizer.SetFlexibleDirection(wx.HORIZONTAL)
        self.SetSizer(self.sizer)
        self.SetAutoLayout(True)
        self.SetupScrolling()
        self.register_listener(self.on_coloring_changed, EVT_APTUS_COLORING_CHANGED, self.viewwin)
        self.on_coloring_changed(None)

    def on_coloring_changed(self, event_unused):
        # When the view window's coloring changes, see if the palette changed.
        if self.viewwin.palette_index != self.selected:
            # Change which of the palettes is selected.
            # NOTE(review): on the first call self.selected is -1, so this
            # briefly clears the flag on the *last* palette window — harmless
            # since it is False already, but confirm that's intended.
            self.palwins[self.selected].selected = False
            self.selected = self.viewwin.palette_index
            self.palwins[self.selected].selected = True
            self.ScrollChildIntoView(self.palwins[self.selected])
            self.Refresh()
class PalettesFrame(AptusToolFrame):
    """ The top level frame for the palettes list.
    """
    def __init__(self, mainframe, palettes, viewwin):
        AptusToolFrame.__init__(self, mainframe, title='Palettes', size=(250, 350))
        self.panel = PalettesPanel(self, palettes, viewwin)
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/gui/palettespanel.py
|
palettespanel.py
|
import wx
from wx.lib.evtmgr import eventManager
class AptusToolableFrameMixin:
    """ A mixin to add to a frame. Tool windows can be attached to this, and
        will behave nicely (minimizing, etc).
    """
    def __init__(self):
        self.toolwins = []
        self.Bind(wx.EVT_ICONIZE, self.on_iconize)
        self.Bind(wx.EVT_CLOSE, self.on_close)

    def add_toolwin(self, toolwin):
        """ Attach a tool window so it follows this frame's state. """
        self.toolwins.append(toolwin)

    def remove_toolwin(self, toolwin):
        """ Detach a previously attached tool window. """
        self.toolwins.remove(toolwin)

    def on_iconize(self, event):
        # Minimizing hides every attached tool; restoring shows them again.
        visible = not event.Iconized()
        for win in self.toolwins:
            win.Show(visible)
        event.Skip()

    def on_close(self, event):
        # Closing the frame closes every attached tool window too.
        for win in self.toolwins:
            win.Close()
        event.Skip()
class AptusToolFrame(wx.MiniFrame):
    """ A frame for tool windows.
    """
    # This handles getting the styles right for miniframes.
    def __init__(self, mainframe, title='', size=wx.DefaultSize):
        # If I pass mainframe into MiniFrame, the focus gets messed up, and keys don't work anymore!? Really, where?
        wx.MiniFrame.__init__(self, mainframe, title=title, size=size,
            style=wx.DEFAULT_FRAME_STYLE # TODO: | wx.TINY_CAPTION_HORIZONTAL
            )
        # Register with the main frame so we iconize/close along with it.
        self.mainframe = mainframe
        self.mainframe.add_toolwin(self)
        self.Bind(wx.EVT_WINDOW_DESTROY, self.on_destroy)

    def on_destroy(self, event_unused):
        # Unregister from the main frame when we go away.
        self.mainframe.remove_toolwin(self)
class ListeningWindowMixin:
    """ Adds event listening to a window, and deregisters automatically on
        destruction.
    """
    def __init__(self):
        # The eventManager listeners we've registered.
        self.listeners = set()
        # The raw events we've bound to.
        self.events = set()
        self.Bind(wx.EVT_WINDOW_DESTROY, self.on_destroy)

    def on_destroy(self, event_unused):
        # Tear down everything we registered, both eventManager listeners
        # and raw wx event bindings.
        for listener in self.listeners:
            eventManager.DeregisterListener(listener)
        for win, evt in self.events:
            win.Unbind(evt)

    def register_listener(self, fn, evt, sender):
        """ Register a listener for an eventManager event. This will be automatically
            de-registered when self is destroyed.
        """
        eventManager.Register(fn, evt, sender)
        self.listeners.add(fn)

    def deregister_listener(self, fn):
        """ Deregister a previously registered listener.
        """
        eventManager.DeregisterListener(fn)
        self.listeners.discard(fn)

    def bind_to_other(self, other_win, evt, fn):
        """ Bind to a standard wxPython event on another window. This will be
            automatically Unbind'ed when self is destroyed.
        """
        other_win.Bind(evt, fn)
        self.events.add((other_win, evt))
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/gui/misc.py
|
misc.py
|
import os
import os.path
import re
import wx
import wx.aui
from aptus import data_file
from aptus.gui.ids import *
from aptus.gui.viewpanel import AptusViewPanel
from aptus.gui.misc import AptusToolableFrameMixin
from aptus.options import AptusOptions
class AptusMainFrame(wx.Frame, AptusToolableFrameMixin):
    """ The main window frame of the Aptus app.
    """
    def __init__(self, args=None, compute=None, size=None):
        """ Create an Aptus main GUI frame. `args` is an argv-style list of
            command-line arguments. `compute` is an existing compute object to
            copy settings from.
        """
        wx.Frame.__init__(self, None, -1, 'Aptus')
        AptusToolableFrameMixin.__init__(self)

        # Make the panel
        self.panel = AptusViewPanel(self)

        if args:
            opts = AptusOptions(self.panel.compute)
            opts.read_args(args)
        if compute:
            self.panel.compute.copy_all(compute)
        if size:
            self.panel.compute.size = size
            # NOTE(review): an explicit size also forces supersampling off —
            # confirm that is intended for all callers.
            self.panel.compute.supersample = 1

        if 0:
            # Experimental AUI support
            self.auimgr = wx.aui.AuiManager()
            self.auimgr.SetManagedWindow(self)
            self.auimgr.AddPane(self.panel, wx.aui.AuiPaneInfo().Name("grid_content").
                PaneBorder(False).CenterPane())
            from aptus.gui import pointinfo
            self.pointinfo_tool = pointinfo.PointInfoPanel(self, self.panel)
            self.auimgr.AddPane(self.pointinfo_tool, wx.aui.AuiPaneInfo().
                Name("pointinfo").Caption("Point info").
                Right().Layer(1).Position(1).CloseButton(True))
            self.auimgr.Update()

        # Set the window icon
        ib = wx.IconBundle()
        ib.AddIcon(data_file("icon48.png"), wx.BITMAP_TYPE_ANY)
        ib.AddIcon(data_file("icon32.png"), wx.BITMAP_TYPE_ANY)
        ib.AddIcon(data_file("icon16.png"), wx.BITMAP_TYPE_ANY)
        self.SetIcons(ib)

        # Bind commands
        self.Bind(wx.EVT_MENU, self.cmd_new, id=id_new)
        self.Bind(wx.EVT_MENU, self.cmd_save, id=id_save)
        self.Bind(wx.EVT_MENU, self.cmd_open, id=id_open)
        self.Bind(wx.EVT_MENU, self.cmd_help, id=id_help)
        self.Bind(wx.EVT_MENU, self.cmd_fullscreen, id=id_fullscreen)
        self.Bind(wx.EVT_MENU, self.cmd_window_size, id=id_window_size)
        self.Bind(wx.EVT_MENU, self.cmd_show_youarehere, id=id_show_youarehere)
        self.Bind(wx.EVT_MENU, self.cmd_show_palettes, id=id_show_palettes)
        self.Bind(wx.EVT_MENU, self.cmd_show_stats, id=id_show_stats)
        self.Bind(wx.EVT_MENU, self.cmd_show_pointinfo, id=id_show_pointinfo)
        self.Bind(wx.EVT_MENU, self.cmd_show_julia, id=id_show_julia)

        # Auxilliary frames.
        self.youarehere_tool = None
        self.palettes_tool = None
        self.stats_tool = None
        self.pointinfo_tool = None
        self.julia_tool = None

        # Files can be dropped here.
        self.SetDropTarget(MainFrameFileDropTarget(self))

    def Show(self, show=True):
        # Override Show so we can set the view properly.
        if show:
            self.SetClientSize(self.panel.compute.size)
            self.panel.set_view()
            wx.Frame.Show(self, True)
            self.panel.SetFocus()
        else:
            wx.Frame.Show(self, False)

    def message(self, msg):
        """ Pop up a modal warning dialog with `msg`. """
        dlg = wx.MessageDialog(self, msg, 'Aptus', wx.OK | wx.ICON_WARNING)
        dlg.ShowModal()
        dlg.Destroy()

    # Command handlers.

    def show_file_dialog(self, dlg):
        """ Show a file dialog, and do some post-processing on the result.
            Returns a pair: type, path.
            Type is one of the extensions from the wildcard choices.
        """
        if dlg.ShowModal() == wx.ID_OK:
            pth = dlg.Path
            ext = os.path.splitext(pth)[1].lower()
            idx = dlg.FilterIndex
            # Wildcard strings alternate description|pattern; pattern for the
            # chosen filter is at 2*idx+1.
            wildcards = dlg.Wildcard.split('|')
            wildcard = wildcards[2*idx+1]
            if wildcard == '*.*':
                if ext:
                    typ = ext[1:]
                else:
                    typ = ''
            elif '*'+ext in wildcards:
                # The extension of the file is a recognized extension:
                # Use it regardless of the file type chosen in the picker.
                typ = ext[1:]
            else:
                typ = wildcard.split('.')[-1].lower()
            if ext == '' and typ != '':
                # No extension typed: append the chosen type's extension.
                pth += '.' + typ
            return typ, pth
        else:
            return None, None

    def cmd_new(self, event_unused):
        # Delegates to the app so the new frame is tracked; returns the frame.
        return wx.GetApp().new_window()

    # Files we can open and save.
    wildcards = (
        "PNG image (*.png)|*.png|"
        "Aptus state (*.aptus)|*.aptus|"
        "All files (*.*)|*.*"
        )

    def cmd_save(self, event_unused):
        """ Prompt for a file name and save the current view as PNG or .aptus. """
        dlg = wx.FileDialog(
            self, message="Save", defaultDir=os.getcwd(), defaultFile="",
            style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT, wildcard=self.wildcards
            )
        typ, pth = self.show_file_dialog(dlg)
        if typ:
            if typ == 'png':
                self.panel.write_png(pth)
            elif typ == 'aptus':
                self.panel.write_aptus(pth)
            else:
                self.message("Don't understand how to write file '%s'" % pth)

    def cmd_open(self, event_unused):
        """ Prompt for a file name and open it in this window. """
        dlg = wx.FileDialog(
            self, message="Open", defaultDir=os.getcwd(), defaultFile="",
            style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST, wildcard=self.wildcards
            )
        typ, pth = self.show_file_dialog(dlg)
        if typ:
            self.open_file(pth)

    def open_file(self, pth):
        """ Load the settings from file `pth` into this window and redraw. """
        opts = AptusOptions(self.panel.compute)
        opts.opts_from_file(pth)
        self.SetClientSize(self.panel.compute.size)
        self.panel.fire_command(id_redraw)

    def cmd_help(self, event_unused):
        from aptus.gui.help import HelpDlg
        dlg = HelpDlg(self)
        dlg.ShowModal()

    def cmd_fullscreen(self, event_unused):
        self.ShowFullScreen(not self.IsFullScreen())

    def cmd_window_size(self, event_unused):
        """ Prompt for a new window size: either "W x H", or "s/N" for a
            1/N fraction of the screen.
        """
        cur_size = "%d x %d" % tuple(self.GetClientSize())
        dlg = wx.TextEntryDialog(self.GetTopLevelParent(), "Window size",
            "New window size?", cur_size)
        if dlg.ShowModal() == wx.ID_OK:
            new_size = dlg.GetValue().strip()
            m = re.match(r"(?P<w>\d+)\s*[x, ]\s*(?P<h>\d+)|s/(?P<mini>[\d.]+)", new_size)
            if m:
                if m.group('mini') is not None:
                    factor = float(m.group('mini'))
                    screen_w, screen_h = wx.GetDisplaySize()
                    w, h = screen_w/factor, screen_h/factor
                elif m.group('w') is not None:
                    w, h = int(m.group('w')), int(m.group('h'))
                self.SetClientSize((w,h))
        dlg.Destroy()

    # NOTE(review): the cmd_show_* toggles rely on a destroyed wx window
    # evaluating as falsy, since the attribute is never reset to None — confirm.

    def cmd_show_youarehere(self, event_unused):
        """ Toggle the presence of the YouAreHere tool.
        """
        if self.youarehere_tool:
            self.youarehere_tool.Destroy()
        else:
            from aptus.gui import youarehere
            self.youarehere_tool = youarehere.YouAreHereFrame(self, self.panel)
            self.youarehere_tool.Show()

    def cmd_show_palettes(self, event_unused):
        """ Toggle the presence of the Palettes tool.
        """
        if self.palettes_tool:
            self.palettes_tool.Destroy()
        else:
            from aptus.gui import palettespanel
            from aptus.palettes import all_palettes
            self.palettes_tool = palettespanel.PalettesFrame(self, all_palettes, self.panel)
            self.palettes_tool.Show()

    def cmd_show_stats(self, event_unused):
        """ Toggle the presence of the Stats tool.
        """
        if self.stats_tool:
            self.stats_tool.Destroy()
        else:
            from aptus.gui import statspanel
            self.stats_tool = statspanel.StatsFrame(self, self.panel)
            self.stats_tool.Show()

    def cmd_show_pointinfo(self, event_unused):
        """ Toggle the presence of the PointInfo tool.
        """
        if self.pointinfo_tool:
            self.pointinfo_tool.Destroy()
        else:
            from aptus.gui import pointinfo
            self.pointinfo_tool = pointinfo.PointInfoFrame(self, self.panel)
            self.pointinfo_tool.Show()

    def cmd_show_julia(self, event_unused):
        """ Toggle the presence of the Julia tool.
        """
        # Only meaningful when the main view shows the Mandelbrot set.
        if self.panel.compute.mode == 'mandelbrot':
            if self.julia_tool:
                self.julia_tool.Destroy()
            else:
                from aptus.gui import juliapanel
                self.julia_tool = juliapanel.JuliaFrame(self, self.panel)
                self.julia_tool.Show()
class MainFrameFileDropTarget(wx.FileDropTarget):
    """A drop target so files can be opened by dragging them to the Aptus window.
    The first file opens in the current window, the rest open new windows.
    """
    def __init__(self, frame):
        wx.FileDropTarget.__init__(self)
        self.frame = frame

    def OnDropFiles(self, x, y, filenames):
        # First file replaces the current view; every remaining file gets a
        # freshly created window of its own.
        first, rest = filenames[0], filenames[1:]
        self.frame.open_file(first)
        for name in rest:
            new_frame = self.frame.cmd_new(None)
            new_frame.open_file(name)
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/gui/mainframe.py
|
mainframe.py
|
import math
import wx
from wx.lib.scrolledpanel import ScrolledPanel
from aptus import settings
from aptus.gui.computepanel import ComputePanel
from aptus.gui.ids import *
from aptus.gui.misc import AptusToolFrame, ListeningWindowMixin
# Minimum rectangle size, in pixels — presumably a lower bound for the
# here-rectangle; not referenced in this chunk, TODO confirm usage.
MIN_RECT = 20

# Alias for the compute-panel base class, so it can be swapped easily.
ParentComputePanel = ComputePanel
class YouAreHereWin(ParentComputePanel, ListeningWindowMixin):
""" A panel slaved to another ComputePanel to show where the master panel is
on the Set. These are designed to be stacked in a YouAreHereStack to
show successive magnifications.
Two windows are referenced: the main view window (so that we can change
the view), and the window our rectangle represents. This can be either
the next YouAreHereWin in the stack, or the main view window in the case
of the last window in the stack.
"""
def __init__(self, parent, mainwin, center, diam, size=wx.DefaultSize):
ParentComputePanel.__init__(self, parent, size=size)
ListeningWindowMixin.__init__(self)
self.mainwin = mainwin
self.hererect = None
self.diam = diam
self.Bind(wx.EVT_SIZE, self.on_size)
self.Bind(wx.EVT_IDLE, self.on_idle)
self.Bind(wx.EVT_LEFT_DOWN, self.on_left_down)
self.Bind(wx.EVT_LEFT_UP, self.on_left_up)
self.Bind(wx.EVT_MOTION, self.on_motion)
self.register_listener(self.on_coloring_changed, EVT_APTUS_COLORING_CHANGED, self.mainwin)
self.register_listener(self.on_computation_changed, EVT_APTUS_COMPUTATION_CHANGED, self.mainwin)
self.set_ref_window(mainwin)
self.set_geometry(center=center, diam=diam)
self.on_coloring_changed(None)
self.on_computation_changed(None)
self.on_geometry_changed(None)
self.dragging = False
self.drag_pt = None
def set_ref_window(self, refwin):
""" Set the other window that our rectangle models.
"""
# Deregister the old geometry listener
self.deregister_listener(self.on_geometry_changed)
self.rectwin = refwin
# Register the new listener and calc the rectangle.
self.register_listener(self.on_geometry_changed, EVT_APTUS_GEOMETRY_CHANGED, self.rectwin)
self.calc_rectangle()
def on_size(self, event):
# Need to recalc our rectangle.
self.hererect = None
ParentComputePanel.on_size(self, event)
def on_idle(self, event):
# Let the ComputePanel resize.
ParentComputePanel.on_idle(self, event)
# Then we can recalc our rectangle.
if not self.hererect:
self.calc_rectangle()
def on_left_down(self, event):
mouse_pt = event.GetPosition()
if self.hererect.Contains(mouse_pt):
self.dragging = True
self.drag_pt = mouse_pt
def on_left_up(self, event):
# Reposition the main window.
if self.dragging:
if self.mainwin == self.rectwin:
# We already show the actual view, so just recenter on the center
# of the rectangle.
mx = self.hererect.x + self.hererect.width/2
my = self.hererect.y + self.hererect.height/2
self.mainwin.set_geometry(center=self.compute.coords_from_pixel(mx, my))
else:
# Dragging the rect: set the view to include the four corners of
# the rectangle.
ulr, uli = self.compute.coords_from_pixel(*self.hererect.TopLeft)
lrr, lri = self.compute.coords_from_pixel(*self.hererect.BottomRight)
self.mainwin.set_geometry(corners=(ulr, uli, lrr, lri))
self.dragging = False
else:
# Clicking outside the rect: recenter there.
mx, my = event.GetPosition()
self.mainwin.set_geometry(center=self.compute.coords_from_pixel(mx, my), diam=self.diam)
def on_motion(self, event):
    """ Update the cursor, and move the rectangle while a drag is active. """
    self.set_cursor(event)
    if not self.dragging:
        return
    where = event.GetPosition()
    self.hererect.Offset((where.x - self.drag_pt.x, where.y - self.drag_pt.y))
    self.drag_pt = where
    self.Refresh()
def set_cursor(self, event):
    """ Show a sizing cursor when over (or dragging) the rectangle. """
    pos = event.GetPosition()
    over_rect = bool(self.hererect) and self.hererect.Contains(pos)
    cursor_id = wx.CURSOR_SIZING if (self.dragging or over_rect) else wx.CURSOR_DEFAULT
    self.SetCursor(wx.Cursor(cursor_id))
def on_coloring_changed(self, event_unused):
    # Mirror the main window's coloring; only repaint if something changed.
    if self.compute.copy_coloring(self.mainwin.compute):
        self.coloring_changed()
def on_computation_changed(self, event_unused):
    # Mirror the main window's computation settings; only recompute if
    # something actually changed.
    if self.compute.copy_computation(self.mainwin.compute):
        self.computation_changed()
def on_geometry_changed(self, event_unused):
    # When a geometry_changed event comes in, copy the pertinent info from
    # the master window, then compute the window visible in our coordinates.
    if self.compute.angle != self.mainwin.compute.angle:
        # Only the rotation angle is mirrored; our center and diam stay ours.
        self.compute.angle = self.mainwin.compute.angle
        self.geometry_changed()
    self.calc_rectangle()
def calc_rectangle(self):
    """ Compute the reference window's viewport as a rect in our pixels. """
    # Map the reference window's corner pixels into our pixel space.
    left, top = self.compute.pixel_from_coords(*self.rectwin.compute.coords_from_pixel(0, 0))
    right, bottom = self.compute.pixel_from_coords(*self.rectwin.compute.coords_from_pixel(*self.rectwin.compute.size))
    # Round outward so the rectangle fully covers the reference view.
    left = int(math.floor(left))
    top = int(math.floor(top))
    right = int(math.ceil(right)) + 1
    bottom = int(math.ceil(bottom)) + 1
    wide = right - left
    high = bottom - top
    # Never draw the box smaller than 3 pixels; scooch back one pixel to
    # adjust to the widened box.
    if wide < 3:
        left -= 1
        wide = 3
    if high < 3:
        top -= 1
        high = 3
    self.hererect = wx.Rect(left, top, wide, high)
    self.Refresh()
def on_paint_extras(self, dc):
    """ Draw the reference window's viewport as a white outline. """
    if not self.hererect:
        return
    dc.SetBrush(wx.TRANSPARENT_BRUSH)
    dc.SetPen(wx.Pen(wx.Colour(255, 255, 255), 1, wx.SOLID))
    dc.DrawRectangle(*self.hererect)
class YouAreHereStack(ScrolledPanel, ListeningWindowMixin):
    """ A scrolled panel with a stack of YouAreHereWin's, each at a successive
    magnification.
    """
    def __init__(self, parent, viewwin, size=wx.DefaultSize):
        ScrolledPanel.__init__(self, parent, size=size)
        ListeningWindowMixin.__init__(self)

        self.winsize = 250      # Pixel size of each YouAreHereWin.
        self.minrect = MIN_RECT
        # Magnification step between successive windows in the stack.
        self.stepfactor = float(self.winsize)/self.minrect
        self.viewwin = viewwin

        self.sizer = wx.FlexGridSizer(cols=1, vgap=2, hgap=0)
        self.SetSizer(self.sizer)
        self.SetAutoLayout(1)
        self.SetupScrolling()

        self.register_listener(self.on_geometry_changed, EVT_APTUS_GEOMETRY_CHANGED, self.viewwin)
        self.on_geometry_changed()

    def on_geometry_changed(self, event_unused=None):
        """ Rebuild the stack to span from the whole set down to the current
        view, re-using existing windows where possible.
        """
        mode = self.viewwin.compute.mode
        diam = min(settings.diam(mode))
        # How many YouAreHereWin's will we need?
        targetdiam = min(self.viewwin.compute.diam)
        num_wins = int(math.ceil((math.log(diam)-math.log(targetdiam))/math.log(self.stepfactor)))
        # Always show at least one window, even if zoomed out past the start
        # (which would make the computed count zero or negative).
        num_wins = max(num_wins, 1)
        cur_wins = list(self.sizer.Children)
        last = None
        for i in range(num_wins):
            if i == 0:
                # Don't recenter the topmost YouAreHere.
                center = settings.center(mode)
            else:
                center = self.viewwin.compute.center
            if i < len(cur_wins):
                # Re-using an existing window in the stack.
                win = cur_wins[i].Window
                win.set_geometry(center=center, diam=(diam,diam))
            else:
                # Going deeper: have to make a new window.
                win = YouAreHereWin(
                    self, self.viewwin, center=center,
                    diam=(diam,diam), size=(self.winsize, self.winsize)
                )
                self.sizer.Add(win)
            if last:
                last.set_ref_window(win)
            last = win
            diam /= self.stepfactor
        # The last window needs to draw a rectangle for the view window.
        last.set_ref_window(self.viewwin)
        # Remove windows we no longer need, from the bottom up so the sizer
        # indexes stay valid while removing.
        for i in reversed(range(num_wins, len(cur_wins))):
            win = cur_wins[i].Window
            self.sizer.Remove(i)
            win.Destroy()
        self.sizer.Layout()
        self.SetupScrolling()
class YouAreHereFrame(AptusToolFrame):
    """ Tool frame holding a stack of successively-magnified overview panes. """
    def __init__(self, mainframe, viewwin):
        super().__init__(mainframe, title='You are here', size=(250, 550))
        self.stack = YouAreHereStack(self, viewwin)
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/gui/youarehere.py
|
youarehere.py
|
from PIL import Image
import wx
from aptus import settings
from aptus.compute import AptusCompute
from aptus.gui.ids import *
from aptus.options import AptusState
from aptus.palettes import all_palettes
from aptus.progress import NullProgressReporter
class ComputePanel(wx.Panel):
    """ A panel capable of drawing a Mandelbrot.

    Owns an AptusCompute engine and a cached bitmap of the rendered fractal.
    Subclasses can override `on_paint_extras` to draw over the image, and
    `make_progress_reporter` to report progress of long computations.
    """
    def __init__(self, parent, size=wx.DefaultSize):
        wx.Panel.__init__(self, parent, style=wx.NO_BORDER | wx.WANTS_CHARS, size=size)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)

        self.compute = AptusCompute()
        self.compute.quiet = True   # default to quiet.

        # AptusCompute default values
        self.compute.palette = all_palettes[0]

        # The cached rendered bitmap; None means it must be redrawn.
        # Initialized here so a paint arriving before the first size event
        # can't see an unset attribute.
        self.bitmap = None
        # True when the client size may disagree with the compute size;
        # on_idle reads this, and on_window_create calls on_idle directly.
        self.check_size = True

        # Bind events
        self.Bind(wx.EVT_WINDOW_CREATE, self.on_window_create)
        self.Bind(wx.EVT_PAINT, self.on_paint)
        self.Bind(wx.EVT_SIZE, self.on_size)
        self.Bind(wx.EVT_IDLE, self.on_idle)

    def set_geometry(self, center=None, diam=None, corners=None):
        """ Change the panel to display a new place in the Set.

        `center` is the ri coords of the new center, `diam` is the r and i
        size of the view, `corners` is a 4-tuple (ulr, uli, lrr, lri) of the
        four corners of the view. Only specify a subset of these.
        """
        compute = self.compute
        if corners:
            ulr, uli, lrr, lri = corners
            compute.center = ((ulr+lrr)/2, (uli+lri)/2)
            # Convert the corners to pixel distances to derive the diameter.
            ulx, uly = compute.pixel_from_coords(ulr, uli)
            lrx, lry = compute.pixel_from_coords(lrr, lri)
            compute.diam = (abs(compute.pixsize*(lrx-ulx)), abs(compute.pixsize*(lry-uly)))
        if center:
            compute.center = center
        if diam:
            compute.diam = diam
        self.geometry_changed()

    # GUI helpers

    def fire_command(self, cmdid, data=None):
        """ Post a command event with id `cmdid` and optional client data. """
        # I'm not entirely sure about why this is the right event type to use,
        # but it works...
        evt = wx.CommandEvent(wx.wxEVT_COMMAND_TOOL_CLICKED)
        evt.SetId(cmdid)
        evt.SetClientData(data)
        wx.PostEvent(self, evt)

    def fire_event(self, evclass, **kwargs):
        """ Synchronously process an event of class `evclass`. """
        evt = evclass(**kwargs)
        self.GetEventHandler().ProcessEvent(evt)

    def message(self, msg):
        """ Show `msg` to the user via the top-level window. """
        top = self.GetTopLevelParent()
        top.message(msg)

    def coloring_changed(self):
        """ Coloring changed: invalidate the bitmap and notify listeners. """
        self.bitmap = None
        self.Refresh()
        self.fire_event(AptusColoringChangedEvent)

    def computation_changed(self):
        """ Computation settings changed: rebuild and notify listeners. """
        self.set_view()
        self.fire_event(AptusComputationChangedEvent)

    def geometry_changed(self):
        """ Geometry changed: rebuild and notify listeners. """
        self.set_view()
        self.fire_event(AptusGeometryChangedEvent)

    # Event handlers

    def on_window_create(self, event):
        # Run the idle logic once so the panel sizes itself immediately.
        self.on_idle(event)

    def on_size(self, event_unused):
        # Defer the actual resize work to idle time.
        self.check_size = True

    def on_idle(self, event_unused):
        # Resize the computation grid once the client size has settled.
        if self.check_size and self.GetClientSize() != self.compute.size:
            if self.GetClientSize() != (0,0):
                self.geometry_changed()

    def on_paint(self, event_unused):
        # Recompute the bitmap lazily, then blit it and any extras.
        if not self.bitmap:
            self.bitmap = self.draw_bitmap()
        dc = wx.AutoBufferedPaintDC(self)
        dc.DrawBitmap(self.bitmap, 0, 0, False)
        self.on_paint_extras(dc)

    def on_paint_extras(self, dc):
        """ An overridable method so that derived classes can paint extra stuff
        on top of the fractal.
        """
        pass

    # Information methods

    def get_stats(self):
        """ Return a dictionary full of statistics about the latest computation.
        """
        return self.compute.stats

    def get_point_info(self, pt):
        """ Return a dictionary of information about the specified point (in client pixels).
        If the point is outside the window, None is returned.
        """
        if not self.GetRect().Contains(pt):
            return None
        x, y = pt
        r, i = self.compute.coords_from_pixel(x, y)

        if self.compute.pix is not None:
            rgb = self.compute.pix[y, x]
            color = "#%02x%02x%02x" % (rgb[0], rgb[1], rgb[2])
        else:
            color = None

        count = self.compute.counts[y, x]
        if self.compute.eng.cont_levels != 1:
            # Continuous coloring scales counts; undo that for display.
            count /= self.compute.eng.cont_levels

        point_info = {
            'x': x, 'y': y,
            'r': r, 'i': i,
            'count': count,
            'color': color,
        }
        return point_info

    # Output methods

    def make_progress_reporter(self):
        """ Create a progress reporter for use when this panel computes.
        """
        return NullProgressReporter()

    def bitmap_from_compute(self):
        """ Convert the computed pixels into a wx.Bitmap. """
        pix = self.compute.color_mandel()
        bitmap = wx.Bitmap.FromBuffer(pix.shape[1], pix.shape[0], pix)
        return bitmap

    def draw_bitmap(self):
        """ Return a bitmap with the image to display in the window.
        """
        wx.BeginBusyCursor()
        self.compute.progress = self.make_progress_reporter()
        self.compute.while_waiting = self.draw_progress
        self.compute.compute_pixels()
        wx.CallAfter(self.fire_event, AptusRecomputedEvent)
        self.Refresh()
        bitmap = self.bitmap_from_compute()
        wx.EndBusyCursor()
        return bitmap

    def draw_progress(self):
        """ Called from the GUI thread periodically during computation.
        Repaints the window.
        """
        self.bitmap = self.bitmap_from_compute()
        self.Refresh()
        self.Update()
        wx.CallAfter(self.fire_event, AptusRecomputedEvent)
        wx.SafeYield(onlyIfNeeded=True)

    def set_view(self):
        """ Sync the engine to the client size and invalidate the bitmap. """
        self.bitmap = None
        self.compute.size = self.GetClientSize()
        self.compute.create_mandel()
        self.check_size = False
        self.Refresh()

    # Output-writing methods

    def write_png(self, pth):
        """ Write the current image as a PNG to the path `pth`.
        """
        image = self.bitmap.ConvertToImage()
        im = Image.new('RGB', (image.GetWidth(), image.GetHeight()))
        im.frombytes(bytes(image.GetData()))
        self.compute.write_image(im, pth)

    def write_aptus(self, pth):
        """ Write the current Aptus state of the panel to the path `pth`.
        """
        aptst = AptusState(self.compute)
        aptst.write(pth)
class MiniComputePanel(ComputePanel):
    """ A compute panel for use as a minor pane.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.Bind(wx.EVT_LEFT_DCLICK, self.on_left_dclick)

    def on_left_dclick(self, event_unused):
        """ Double-clicking on a mini compute panel opens a new window to the same
        view.
        """
        wx.GetApp().new_window(compute=self.compute, size=settings.explorer_size)
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/gui/computepanel.py
|
computepanel.py
|
import webbrowser
import sys
import numpy
import wx
import wx.html2
import wx.lib.layoutf
from PIL import Image
from aptus import data_file, __version__
from aptus.options import AptusOptions
class HtmlDialog(wx.Dialog):
    """ A simple dialog for displaying HTML, with clickable links that launch
    a web browser, or change the page displayed in the dialog.

    `pages` maps page names to HTML fragments; a displayed page is
    pages['head'] + pages[name], %-interpolated with the `subs` dict.
    """
    def __init__(self, parent, caption, pages, subs=None,
            pos=wx.DefaultPosition, size=(500,530),
            style=wx.DEFAULT_DIALOG_STYLE):
        wx.Dialog.__init__(self, parent, -1, caption, pos, size, style)

        # Center on screen when no explicit position was requested
        # (wx.DefaultPosition compares equal to (-1, -1)).
        if pos == (-1, -1):
            self.CenterOnScreen(wx.BOTH)

        self.pages = pages
        self.subs = subs or {}

        self.html = wx.html2.WebView.New(self)
        self.html.Bind(wx.html2.EVT_WEBVIEW_NAVIGATING, self.on_navigating)

        ok = wx.Button(self, wx.ID_OK, "OK")
        ok.SetDefault()

        # Layout constraints: the web view fills the dialog above the button.
        lc = wx.lib.layoutf.Layoutf('t=t#1;b=t5#2;l=l#1;r=r#1', (self,ok))
        self.html.SetConstraints(lc)
        self.set_page('interactive')

        lc = wx.lib.layoutf.Layoutf('b=b5#1;r=r5#1;w!80;h*', (self,))
        ok.SetConstraints(lc)
        self.SetAutoLayout(1)
        self.Layout()

    def on_navigating(self, event):
        """ Intercept navigation: open http(s) links in an external browser,
        and treat 'internal:NAME' links as page switches within the dialog.
        """
        url = event.GetURL()
        if url == "":
            # Empty-URL navigations are blocked outright.
            event.Veto()
        elif url.startswith(("http:", "https:")):
            webbrowser.open(url)
            event.Veto()
        elif url.startswith('internal:'):
            # NOTE(review): this navigation is not vetoed; presumably the
            # SetPage call supersedes it — confirm on all platforms.
            self.set_page(url.split(':')[1])

    def set_page(self, pagename):
        """ Display `pagename`, prefixed with the shared 'head' fragment. """
        html = self.pages['head'] + self.pages[pagename]
        html = html % self.subs
        self.html.SetPage(html, "")
# The help text

# True under the Mac port of wx; used to show "cmd" instead of "ctrl".
is_mac = ('wxMac' in wx.PlatformInfo)

# Substitution values %-interpolated into the HELP_PAGES templates below.
TERMS = {
    'ctrl': 'cmd' if is_mac else 'ctrl',
    'iconsrc': data_file('icon48.png'),
    'version': __version__,
    'python_version': sys.version,
    'wx_version': wx.__version__,
    'numpy_version': numpy.__version__,
    'pil_version': Image.__version__,
}
HELP_PAGES = {
'head': """\
<style>
kbd {
display: inline-block;
background: #f0f0f0;
border: 2px solid #888;
border-color: #888 #333 #333 #888;
border-radius: .25em;
padding: .1em .25em;
margin: .1em;
}
</style>
<table width='100%%'>
<tr>
<td width='50' valign='top'><img src='%(iconsrc)s'/></td>
<td valign='top'>
<b>Aptus %(version)s</b>, Mandelbrot set explorer.<br>
Copyright 2007-2020, Ned Batchelder.<br>
<a href='https://nedbatchelder.com/code/aptus'>http://nedbatchelder.com/code/aptus</a>
</td>
</tr>
</table>
<p>
<a href='internal:interactive'>Interactive</a> |
<a href='internal:command'>Command line</a> |
<a href='internal:about'>About</a></p>
<hr>
""",
'interactive': """
<p><b>Interactive controls:</b></p>
<blockquote>
<kbd>a</kbd>: set the angle of rotation.<br>
<kbd>c</kbd>: toggle continuous coloring.<br>
<kbd>f</kbd>: toggle full-screen display.<br>
<kbd>h</kbd> or <kbd>?</kbd>: show this help.<br>
<kbd>i</kbd>: set the limit on iterations.<br>
<kbd>j</kbd>: jump among a few pre-determined locations.<br>
<kbd>n</kbd>: create a new window.<br>
<kbd>o</kbd>: open a saved settings or image file.<br>
<kbd>r</kbd>: redraw the current image.<br>
<kbd>s</kbd>: save the current image or settings.<br>
<kbd>w</kbd>: set the window size.<br>
<kbd><</kbd> or <kbd>></kbd>: switch to the next palette.<br>
<kbd>,</kbd> or <kbd>.</kbd>: cycle the current palette one color.<br>
<kbd>;</kbd> or <kbd>'</kbd>: stretch the palette colors (+%(ctrl)s: just a little), if continuous.<br>
<kbd>[</kbd> or <kbd>]</kbd>: adjust the hue of the palette (+%(ctrl)s: just a little).<br>
<kbd>{</kbd> or <kbd>}</kbd>: adjust the saturation of the palette (+%(ctrl)s: just a little).<br>
<kbd>0</kbd> (zero): reset all palette adjustments.<br>
<kbd>space</kbd>: drag mode: click to drag the image to a new position.<br>
<kbd>shift</kbd>: indicate a point of interest for Julia set and point info.<br>
<b>left-click</b>: zoom in (+%(ctrl)s: just a little).<br>
<b>right-click</b>: zoom out (+%(ctrl)s: just a little).<br>
<b>left-drag</b>: select a new rectangle to display.<br>
<b>middle-drag</b>: drag the image to a new position.<br>
</blockquote>
<p><b>Tool windows: press a key to toggle on and off:</b></p>
<blockquote>
<kbd>J</kbd> (shift-j): Show a Julia set for the current (shift-hovered) point.<br>
<kbd>l</kbd> (ell): Show zoom snapshots indicating the current position.<br>
<kbd>p</kbd>: Show a list of palettes that can be applied to the current view.<br>
<kbd>q</kbd>: Show point info for the current (shift-hovered) point.<br>
<kbd>v</kbd>: Show statistics for the latest calculation.
</blockquote>
""",
'command': """
<p>On the command line, use <tt><b>--help</b></tt> to see options:</p>
<pre>""" + AptusOptions(None).options_help() + "</pre>",
'about': """
<p>Built with
<a href='http://python.org'>Python</a>, <a href='http://wxpython.org'>wxPython</a>,
<a href='http://numpy.scipy.org/'>numpy</a>, and
<a href='http://www.pythonware.com/library/pil/handbook/index.htm'>PIL</a>.</p>
<p>Thanks to Rob McMullen and Paul Ollis for help with the drawing code.</p>
<hr>
<p>Installed versions:</p>
<p>
Aptus: %(version)s<br>
Python: %(python_version)s<br>
wx: %(wx_version)s<br>
numpy: %(numpy_version)s<br>
PIL: %(pil_version)s
</p>
""",
}
class HelpDlg(HtmlDialog):
    """ The help dialog for Aptus: the standard HELP_PAGES with TERMS subs. """
    def __init__(self, parent):
        super().__init__(parent, "Aptus", HELP_PAGES, subs=TERMS, size=(650,530))
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/gui/help.py
|
help.py
|
import ast
import asyncio
import base64
import functools
import io
import os
import pathlib
import cachetools
import PIL
import pydantic
import uvicorn
from fastapi import FastAPI, Request, Response
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from aptus import __version__
from aptus.compute import AptusCompute
from aptus.palettes import Palette, all_palettes
# FastAPI application serving the Aptus web UI and tile-computation API.
app = FastAPI()

# Directory containing this file; static assets and templates live beside it.
HERE = pathlib.Path(__file__).parent
app.mount("/static", StaticFiles(directory=HERE / "static"), name="static")
templates = Jinja2Templates(directory=HERE / "templates")
@app.get("/", response_class=HTMLResponse)
async def home(request: Request):
    """ Serve the main page, with palette specs and version for the client. """
    return templates.TemplateResponse(
        "mainpage.html",
        {
            "request": request,
            "palettes": [palette.spec() for palette in all_palettes],
            "version": __version__,
        },
    )
def run_in_executor(f):
    """ Decorator: run the wrapped function in the default thread pool.

    The wrapped callable returns an awaitable instead of the result, so
    blocking work doesn't stall the event loop.
    (Adapted from https://stackoverflow.com/a/53719009/14343)
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        call = functools.partial(f, *args, **kwargs)
        return asyncio.get_running_loop().run_in_executor(None, call)
    return wrapper
# Cache of computed counts. One tile is about 830Kb.
# APTUS_CACHE is the cache budget in megabytes (default 500).
cache_size = int(os.getenv("APTUS_CACHE", "500"))
# LRU keyed by tile spec strings; sized by the byte size of each counts array.
tile_cache = cachetools.LRUCache(cache_size * 1_000_000, getsizeof=lambda nda: nda.nbytes)
@run_in_executor
def compute_tile(compute, cachekey):
    """ Compute (or fetch cached) counts for one tile; return image bytes. """
    cached = tile_cache.get(cachekey)
    if cached is None:
        # Cache miss: do the real computation and remember the counts.
        compute.compute_array()
        tile_cache[cachekey] = compute.counts
    else:
        compute.set_counts(cached)
    image = PIL.Image.fromarray(compute.color_mandel())
    buffer = io.BytesIO()
    compute.write_image(image, buffer)
    return buffer.getvalue()
@run_in_executor
def compute_render(compute):
    """ Render a full (possibly supersampled) image; return image bytes. """
    compute.compute_pixels()
    pix = compute.color_mandel()
    im = PIL.Image.fromarray(pix)
    if compute.supersample > 1:
        # Downsample with the Lanczos filter. PIL.Image.ANTIALIAS was an
        # alias of LANCZOS and was removed in Pillow 10.
        im = im.resize(compute.size, PIL.Image.LANCZOS)
    fout = io.BytesIO()
    compute.write_image(im, fout)
    return fout.getvalue()
class ComputeSpec(pydantic.BaseModel):
    """ The client's JSON description of a computation.

    Decoded from request bodies and applied to an AptusCompute by
    `spec_to_compute`.
    """
    center: tuple[float, float]
    diam: tuple[float, float]
    size: tuple[int, int]
    supersample: int
    # (x0, x1, y0, y1) sub-rectangle of the view, in display pixels.
    coords: tuple[int, int, int, int]
    angle: float
    continuous: bool
    iter_limit: int
    palette: list
    palette_tweaks: dict
class TileRequest(pydantic.BaseModel):
    """ Body of a /tile request: the spec plus the client's sequence number. """
    spec: ComputeSpec
    seq: int
def spec_to_compute(spec):
    """ Build a configured AptusCompute from a ComputeSpec. """
    compute = AptusCompute()
    compute.quiet = True
    compute.center = spec.center
    compute.diam = spec.diam
    compute.size = spec.size
    compute.supersample = spec.supersample
    compute.angle = spec.angle
    compute.continuous = spec.continuous
    compute.iter_limit = spec.iter_limit
    # Rebuild the palette from its spec, then apply the client's tweaks.
    compute.palette = Palette().from_spec(spec.palette)
    compute.palette_phase = spec.palette_tweaks.get("phase", 0)
    compute.palette_scale = spec.palette_tweaks.get("scale", 1.0)
    compute.palette.adjust(
        hue=spec.palette_tweaks.get("hue", 0),
        saturation=spec.palette_tweaks.get("saturation", 0),
    )
    # The requested coords are display pixels; scale up to supersampled pixels.
    supercoords = [v * spec.supersample for v in spec.coords]
    gparams = compute.grid_params().subtile(*supercoords)
    compute.create_mandel(gparams)
    return compute
@app.post("/tile")
async def tile(req: TileRequest):
    """ Compute one tile of the view, returning its image as a data: URL.

    The response echoes `req.seq` so the client can discard stale tiles.
    """
    spec = req.spec
    compute = spec_to_compute(spec)
    # The cache key covers every setting that affects the raw counts.
    # Palette settings are excluded: coloring is applied after the cache.
    # (The embedded whitespace is harmless; the key only has to be unique.)
    cachekey = f"""
        {spec.center}
        {spec.diam}
        {spec.size}
        {spec.angle}
        {spec.continuous}
        {spec.iter_limit}
        {spec.coords}
        """
    data = await compute_tile(compute, cachekey)
    data_url = "data:image/png;base64," + base64.b64encode(data).decode("ascii")
    return {
        "url": data_url,
        "seq": req.seq,
    }
@app.post("/render")
async def render(spec: ComputeSpec):
    """ Render a complete image for the given spec and return its bytes. """
    return Response(content=await compute_render(spec_to_compute(spec)))
def main():
    """ Run the development server: local-only, with auto-reload. """
    uvicorn.run("aptus.web.server:app", host="127.0.0.1", port=8042, reload=True)
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/web/server.py
|
server.py
|
// Default view parameters; also used to decide which URL args to include.
const Defaults = {
    center: {r: -0.6, i: 0.0},
    iter_limit: 1000,
    angle: 0,
};
// One fractal view: three stacked canvases (checkerboard backdrop, fractal
// image, interaction overlay) plus the geometry mapping pixels to complex
// coordinates.
const View = {
    // Nominal tile edge in pixels; the canvas is split into roughly
    // tileX-sized tiles for the server to compute.
    tileX: 400,

    // One map for all views, mapping overlay canvas elements to their view.
    canvas_map: new Map(),

    // Build the three canvases inside `div` and register the overlay.
    init(div) {
        div.setAttribute("class", "canvas_container");
        this.canvas_sizer = document.createElement("div");
        this.canvas_sizer.setAttribute("class", "canvas_sizer");
        div.appendChild(this.canvas_sizer);
        this.backdrop_canvas = document.createElement("canvas");
        this.backdrop_canvas.setAttribute("class", "view backdrop");
        this.canvas_sizer.appendChild(this.backdrop_canvas);
        this.fractal_canvas = document.createElement("canvas");
        this.fractal_canvas.setAttribute("class", "view fractal");
        this.canvas_sizer.appendChild(this.fractal_canvas);
        this.overlay_canvas = document.createElement("canvas");
        this.overlay_canvas.setAttribute("class", "view overlay");
        this.canvas_sizer.appendChild(this.overlay_canvas);
        this.canvas_map.set(this.overlay_canvas, this);
        // Request sequence number. Requests include the sequence number and the tile
        // returns it. If the sequence number has been incremented since the tile was
        // requested, then the tile is no longer needed, and is not displayed.
        this.reqseq = 0;
        return this;
    },

    // Restore all view state to the defaults.
    reset() {
        this.set_center(Defaults.center.r, Defaults.center.i);
        this.set_pixsize(3.0/600);
        this.set_angle(Defaults.angle);
        this.continuous = false;
        this.set_iter_limit(Defaults.iter_limit);
        this.palette_index = 0;
        this.set_canvas_size("*");
        this.tiles_pending = 0;
        this.reset_palette_tweaks();
    },

    // Clear the client-side palette adjustments to their neutral values.
    reset_palette_tweaks() {
        this.palette_tweaks = {
            phase: 0,
            scale: 1,
            hue: 0,
            saturation: 0,
        }
    },

    set_center(r, i) {
        this.centerr = r;
        this.centeri = i;
    },

    set_pixsize(ps) {
        this.pixsize = ps;
    },

    // Normalize to [0, 360) and cache sin/cos for coordinate rotation.
    set_angle(a) {
        this.angle = (a % 360 + 360) % 360;
        const rads = this.angle / 180 * Math.PI;
        this.sina = Math.sin(rads);
        this.cosa = Math.cos(rads);
        return this.angle;
    },

    set_iter_limit(i) {
        this.iter_limit = i;
    },

    set_continuous(c) {
        this.continuous = c;
    },

    // "*" means track the window size; otherwise "W x H"-style text.
    set_canvas_size(s) {
        if (s === "*") {
            this.canvas_size_w = this.canvas_size_h = null;
        }
        else {
            const nums = s.split(/[ ,x]+/);
            this.canvas_size_w = +nums[0];
            this.canvas_size_h = +nums[1];
        }
        this.set_size();
    },

    // Apply the chosen (or window) size to all three canvases.
    set_size() {
        if (this.canvas_size_w) {
            this.canvasW = this.canvas_size_w;
            this.canvasH = this.canvas_size_h;
        }
        else {
            this.canvasW = window.innerWidth;
            this.canvasH = window.innerHeight;
        }
        this.backdrop_canvas.width = this.fractal_canvas.width = this.overlay_canvas.width = this.canvasW;
        this.backdrop_canvas.height = this.fractal_canvas.height = this.overlay_canvas.height = this.canvasH;
        this.canvas_sizer.style.width = this.canvasW + "px";
        this.canvas_sizer.style.height = this.canvasH + "px";
        checkers(this.backdrop_canvas);
    },

    // The ComputeSpec fields shared by every tile of this view.
    spec_for_tile() {
        return {
            center: [this.centerr, this.centeri],
            diam: [
                this.canvasW * this.pixsize,
                this.canvasH * this.pixsize,
            ],
            size: [this.canvasW, this.canvasH],
            angle: this.angle,
            continuous: this.continuous,
            iter_limit: this.iter_limit,
            palette: palettes[this.palette_index],
            palette_tweaks: this.palette_tweaks,
        };
    },

    // A spec for a one-shot full-size render at w x h.
    spec_for_render(supersample, w, h) {
        return {
            ...this.spec_for_tile(),
            supersample: supersample,
            coords: [0, w, 0, h],
            size: [w, h],
        };
    },

    // Split the canvas into tiles, request them all, and resolve when every
    // tile has been drawn. Bumping reqseq abandons older in-flight tiles.
    paint() {
        this.reqseq += 1;
        const imageurls = [];
        const nx = Math.floor(this.canvasW / this.tileX) || 1;
        const ny = Math.floor(this.canvasH / this.tileX) || 1;
        const dx = Math.ceil(this.canvasW / nx);
        const dy = Math.ceil(this.canvasH / ny);
        for (let tx = 0; tx < this.canvasW; tx += dx) {
            for (let ty = 0; ty < this.canvasH; ty += dy) {
                let tile = {
                    view: this,
                    reqseq: this.reqseq,
                    ctx: this.fractal_canvas.getContext("2d"),
                    tx,
                    ty,
                    spec: {
                        ...this.spec_for_tile(),
                        supersample: 1,
                        coords: [
                            tx, Math.min(tx + dx, this.canvasW),
                            ty, Math.min(ty + dy, this.canvasH),
                        ],
                    },
                };
                imageurls.push(tile);
            }
        }
        this.tiles_pending = imageurls.length;
        this.overlay_canvas.classList.add("wait");
        return Promise.all(imageurls.map(getImage));
    },

    // xrot and yrot provide rotated versions of the x,y they are given.
    xrot(x, y) {
        return x * this.cosa + y * this.sina;
    },

    yrot(x, y) {
        return y * this.cosa - x * this.sina;
    },

    // Map a canvas pixel to its complex (r, i) coordinates.
    ri4xy(x, y) {
        const r0 = this.centerr - this.xrot(this.canvasW, this.canvasH)/2 * this.pixsize;
        const i0 = this.centeri + this.yrot(this.canvasW, this.canvasH)/2 * this.pixsize;
        const r = r0 + this.xrot(x, y) * this.pixsize;
        const i = i0 - this.yrot(x, y) * this.pixsize;
        return {r, i};
    },

    // Query string reproducing this view; non-default args only.
    url_args() {
        return (
            `?cr=${this.centerr}&ci=${this.centeri}` +
            (this.angle != Defaults.angle ? `&angle=${this.angle}` : "") +
            (this.iter_limit != Defaults.iter_limit ? `&iter=${this.iter_limit}` : "") +
            `&dw=${this.canvasW * this.pixsize}&dh=${this.canvasH * this.pixsize}`
        );
    },
};
// Request one tile from the server; resolve with the tile once its image
// element has loaded the returned data: URL.
// NOTE(review): if the response's seq is stale, or the fetch fails, this
// promise never settles — paint()'s Promise.all for that generation is
// simply abandoned when reqseq is bumped. Confirm that's intended.
function fetchTile(tile) {
    return new Promise(resolve => {
        const body = {
            seq: tile.reqseq,
            spec: tile.spec,
        };
        fetch_post_json("/tile", body)
        .then(response => response.json())
        .then(tiledata => {
            // Only display the tile if no newer paint() has started.
            if (tiledata.seq == tile.view.reqseq) {
                tile.img = new Image();
                tile.img.src = tiledata.url;
                tile.img.onload = () => resolve(tile);
            }
        })
        .catch(() => {});
    });
}
// Draw a fetched tile into its view's fractal canvas, and clear the busy
// indicator when the last pending tile has arrived.
function showTile(tile) {
    const view = tile.view;
    tile.ctx.drawImage(tile.img, tile.tx, tile.ty);
    view.tiles_pending -= 1;
    if (view.tiles_pending === 0) {
        view.overlay_canvas.classList.remove("wait");
    }
}
// Fetch a tile and paint it as soon as it arrives.
async function getImage(tile) {
    const fetched = await fetchTile(tile);
    return showTile(fetched);
}
// POST `body` as JSON to `url`. Non-2xx responses and network errors are
// shown in the problem panel, then re-rejected for the caller to handle.
function fetch_post_json(url, body) {
    return fetch(url, {
        method: "POST",
        body: JSON.stringify(body),
        headers: {
            "Content-Type": "application/json",
        },
    })
    .then(response => {
        if (!response.ok) {
            throw new Error(`${response.status}: ${response.statusText}`);
        }
        return response;
    })
    .catch(error => {
        document.querySelector("#problempanel p").innerHTML = error;
        Panels.show_panel("#problempanel");
        return Promise.reject(error);
    });
}
const App = {
// Create the single view inside #the_view and initialize all app state.
init() {
    this.view = Object.create(View).init(document.querySelector("#the_view"));
    this.reset();
    this.reset_dragging();
    this.resize_timeout = null;
    return this;
},
reset() {
this.view.reset();
const params = new URLSearchParams(document.location.search.substring(1));
const cr = parseFloat(params.get("cr") || Defaults.center.r);
const ci = parseFloat(params.get("ci") || Defaults.center.i);
const angle = parseFloat(params.get("angle") || Defaults.angle);
const dw = parseFloat(params.get("dw") || 2.7);
const dh = parseFloat(params.get("dh") || 2.7);
const iter = parseFloat(params.get("iter") || Defaults.iter_limit);
pixsize = Math.max(dw / this.view.canvasW, dh / this.view.canvasH);
this.set_center(cr, ci);
this.set_pixsize(pixsize);
this.set_angle(angle);
this.set_iter_limit(1000);
window.history.replaceState({}, "", "/");
},
// Clear all mouse-drag state.
reset_dragging() {
    this.move_target = null;        // overlay canvas the drag started on.
    this.moving = false;            // true while panning the image.
    this.mouse_dragging = false;    // true once movement passes the threshold.
    this.mouse_shift = false;       // shift held at mousedown (rubber-band).
    this.rubstart = null;           // mousedown position.
    this.set_moving(false);
},
// Set the view center and mirror it into the info-panel inputs.
set_center(r, i) {
    this.view.set_center(r, i);
    set_input_value("#centerr", r);
    set_input_value("#centeri", i);
},
// Set the pixel size (zoom) and mirror it into the info-panel input.
set_pixsize(ps) {
    this.view.set_pixsize(ps);
    set_input_value("#pixsize", ps);
},
// Set the rotation angle; the view normalizes it before we display it.
set_angle(a) {
    set_input_value("#angle", this.view.set_angle(a));
},
// Set the iteration limit and mirror it into the info-panel input.
set_iter_limit(i) {
    this.view.set_iter_limit(i);
    set_input_value("#iter_limit", i);
},
// Apply edited values from the info-panel inputs and repaint.
spec_change(ev) {
    this.set_center(get_input_number("#centerr"), get_input_number("#centeri"));
    this.set_pixsize(get_input_number("#pixsize"));
    this.set_angle(get_input_number("#angle"));
    this.set_iter_limit(get_input_number("#iter_limit"));
    this.view.paint();
},
// Begin a potential drag: remember the target canvas, start position, and
// whether shift (rubber-band select) was held.
view_mousedown(ev) {
    ev.preventDefault();
    this.move_target = ev.target;
    this.rubstart = getCursorPosition(ev, this.move_target);
    this.mouse_shift = ev.shiftKey;
},
// While the button is down: past a small threshold, either pan the fractal
// canvas (default) or draw a rubber-band rectangle (shift was held).
view_mousemove(ev) {
    if (!this.move_target) {
        return;
    }
    ev.preventDefault();
    const view = View.canvas_map.get(this.move_target);
    const movedto = getCursorPosition(ev, this.move_target);
    const dx = movedto.x - this.rubstart.x;
    const dy = movedto.y - this.rubstart.y;
    // 5-pixel threshold distinguishes a drag from a sloppy click.
    if (!this.mouse_dragging && Math.abs(dx) + Math.abs(dy) > 5) {
        this.mouse_dragging = true;
        this.set_moving(!this.mouse_shift);
    }
    clear_canvas(view.overlay_canvas);
    if (this.mouse_dragging) {
        if (this.moving) {
            position_element(view.fractal_canvas, dx, dy);
        }
        else {
            // With anti-aliasing, 0.5 offset makes 1-pixel wide.
            const overlay_ctx = view.overlay_canvas.getContext("2d");
            overlay_ctx.lineWidth = 1;
            overlay_ctx.strokeStyle = "#ffffffc0";
            overlay_ctx.strokeRect(this.rubstart.x + 0.5, this.rubstart.y + 0.5, dx, dy);
        }
    }
},
// Finish the interaction: pan, rubber-band zoom, or click-to-zoom.
view_mouseup(ev) {
    if (!this.move_target) {
        return;
    }
    ev.preventDefault();
    const view = View.canvas_map.get(this.move_target);
    const up = getCursorPosition(ev, this.move_target);
    const dx = up.x - this.rubstart.x;
    const dy = up.y - this.rubstart.y;
    if (this.moving) {
        // Pan: shift the center by the drag distance, keep the old pixels
        // visible on the overlay while the repaint happens underneath.
        this.set_center(
            view.centerr - view.xrot(dx, dy) * view.pixsize,
            view.centeri + view.yrot(dx, dy) * view.pixsize
        );
        const overlay_ctx = view.overlay_canvas.getContext("2d");
        overlay_ctx.drawImage(view.fractal_canvas, dx, dy);
        position_element(view.fractal_canvas, 0, 0);
        clear_canvas(view.fractal_canvas);
        view.paint().then(() => {
            clear_canvas(view.overlay_canvas);
        });
    }
    else {
        clear_canvas(view.overlay_canvas);
        if (this.mouse_dragging) {
            // Rubber-band: zoom so the selected rectangle fills the canvas.
            const a = view.ri4xy(this.rubstart.x, this.rubstart.y);
            const b = view.ri4xy(up.x, up.y);
            const dr = a.r - b.r, di = a.i - b.i;
            const rdr = view.xrot(dr, di);
            const rdi = view.yrot(dr, di);
            this.set_pixsize(Math.max(Math.abs(rdr) / view.canvasW, Math.abs(rdi) / view.canvasH));
            this.set_center((a.r + b.r) / 2, (a.i + b.i) / 2);
        }
        else {
            // Plain click: zoom in (shift: out; alt: just a little),
            // keeping the clicked point fixed on screen.
            const {r: clickr, i: clicki} = view.ri4xy(up.x, up.y);
            const factor = ev.altKey ? 1.1 : 2.0;
            if (ev.shiftKey) {
                this.set_pixsize(view.pixsize * factor);
            }
            else {
                this.set_pixsize(view.pixsize / factor);
            }
            const r0 = clickr - view.xrot(up.x, up.y) * view.pixsize;
            const i0 = clicki + view.yrot(up.x, up.y) * view.pixsize;
            this.set_center(
                r0 + view.xrot(view.canvasW, view.canvasH)/2 * view.pixsize,
                i0 - view.yrot(view.canvasW, view.canvasH)/2 * view.pixsize
            );
        }
        view.paint();
    }
    this.reset_dragging();
},
set_moving(m) {
if (this.moving = m) {
this.view.overlay_canvas.classList.add("move");
}
else {
this.view.overlay_canvas.classList.remove("move");
}
},
// Abort an in-progress drag (Escape): restore the canvas position and
// clear all drag state.
cancel_dragging() {
    if (!this.move_target) {
        return;
    }
    const view = View.canvas_map.get(this.move_target);
    position_element(view.fractal_canvas, 0, 0);
    clear_canvas(view.overlay_canvas);
    this.reset_dragging();
},
keydown(ev) {
if (ev.target.matches("input")) {
return;
}
// console.log("key:", ev.key, "shift:", ev.shiftKey, "ctrl:", ev.ctrlKey, "meta:", ev.metaKey, "alt:", ev.altKey);
let key = ev.key;
// Mac option chars need to be mapped back to their original chars.
if (platform() === "mac") {
const oldkey = "¯˘·‚“‘”’…æÚÆ";
const newkey = "<>()[]{};':\"";
const i = oldkey.indexOf(key);
if (i >= 0) {
key = newkey[i];
}
}
let handled = false;
if (!ev.metaKey) {
handled = true;
switch (key) {
case "Escape":
this.cancel_dragging();
break;
case "a":
Panels.show_panel("#infopanel", "#angle");
break;
case "c":
this.view.set_continuous(!this.view.continuous);
this.view.paint();
break;
case "L":
const url = `${document.URL}${this.view.url_args()}`.replace("&", "&");
const html = `<a href="${url}">${url}</a>`;
document.querySelector("#linklink").innerHTML = html;
Panels.show_panel("#linkpanel");
break;
case "i":
Panels.show_panel("#infopanel", "#iter_limit");
break;
case "I":
Panels.toggle_panel("#infopanel");
break;
case "P":
Panels.toggle_panel("#palettepanel");
break;
case "r":
this.view.paint();
break;
case "R":
this.reset();
this.view.paint();
break;
case "s":
if (get_input_value("#rendersize") === "") {
set_input_value("#rendersize", `${this.view.canvasW} x ${this.view.canvasH}`);
}
Panels.toggle_panel("#renderform");
break;
case "w":
let text;
if (!this.view.canvas_size_w) {
text = "*";
}
else {
text = `${this.view.canvas_size_w} x ${this.view.canvas_size_h}`;
}
this.view.set_canvas_size(prompt("Canvas size", text));
this.view.paint();
break;
case "<":
this.view.palette_index -= 1;
if (this.view.palette_index < 0) {
this.view.palette_index += palettes.length;
}
this.view.paint();
break;
case ">":
this.view.palette_index += 1;
this.view.palette_index %= palettes.length;
this.view.paint();
break;
case ")":
this.set_angle(this.view.angle + (ev.altKey ? 1 : 10));
this.view.paint();
break;
case "(":
this.set_angle(this.view.angle - (ev.altKey ? 1 : 10));
this.view.paint();
break;
case ",":
this.view.palette_tweaks.phase -= 1;
this.view.paint();
break;
case ".":
this.view.palette_tweaks.phase += 1;
this.view.paint();
break;
case ";":
if (this.view.continuous) {
this.view.palette_tweaks.scale /= (ev.altKey ? 1.01 : 1.1);
this.view.paint();
}
break;
case "'":
if (this.view.continuous) {
this.view.palette_tweaks.scale *= (ev.altKey ? 1.01 : 1.1);
this.view.paint();
}
break;
case "[":
this.view.palette_tweaks.hue -= (ev.altKey ? 1 : 10);
this.view.paint();
break;
case "]":
this.view.palette_tweaks.hue += (ev.altKey ? 1 : 10);
this.view.paint();
break;
case "{":
this.view.palette_tweaks.saturation -= (ev.altKey ? 1 : 10);
this.view.paint();
break;
case "}":
this.view.palette_tweaks.saturation += (ev.altKey ? 1 : 10);
this.view.paint();
break;
case "0":
this.view.reset_palette_tweaks();
this.view.paint();
break;
case "?":
Panels.toggle_panel("#helppanel");
break;
default:
handled = false;
break;
}
}
if (handled) {
ev.preventDefault();
}
},
resize() {
if (this.resize_timeout) {
clearTimeout(this.resize_timeout);
}
this.resize_timeout = setTimeout(
() => {
this.resize_timeout = null;
this.view.set_size();
this.view.paint();
},
250
);
},
click_render(ev) {
const supersample = get_input_number("#supersample");
const nums = get_input_value("#rendersize").split(/[ ,x]+/);
const spec = this.view.spec_for_render(supersample, +nums[0], +nums[1]);
document.querySelector("#renderwait").classList.add("show");
Panels.show_panel("#renderwait .panel");
fetch_post_json("/render", spec)
.then(response => response.blob())
.then(blob => {
document.querySelector("#renderwait").classList.remove("show");
const save = document.createElement("a");
save.href = URL.createObjectURL(blob);
save.target = "_blank";
save.download = "Aptus.png";
save.dispatchEvent(new MouseEvent("click"));
save.remove();
});
},
};
function getCursorPosition(ev, target) {
const rect = target.getBoundingClientRect();
const x = ev.clientX - rect.left;
const y = ev.clientY - rect.top;
return {x, y};
}
function set_input_value(sel, val) {
    // Write val (stringified) into the input element matched by sel.
    document.querySelector(sel).value = `${val}`;
}
function get_input_value(sel) {
    // Current string value of the input element matched by sel.
    const input = document.querySelector(sel);
    return input.value;
}
function get_input_number(sel) {
    // Numeric value of the input matched by sel (NaN if unparseable).
    return Number(get_input_value(sel));
}
// Manager for the floating UI panels: z-stacking, show/hide/toggle,
// and drag-to-move support for elements carrying the .draggable class.
const Panels = {
// Drag state: the element being dragged, its offset when the drag
// started, and the mouse position where the drag started.
draggable: null,
draggable_start: null,
rubstart: null,
// Raise el above every element in els by setting its zIndex to one more
// than the current maximum. A computed zIndex of "auto" counts as 0.
// NOTE(review): getComputedStyle returns zIndex as a string; Math.max
// coerces the strings to numbers, so the result is still numeric.
bring_to_top(el, els) {
const indexes = [...els].map(e => {
const z = getComputedStyle(e).zIndex;
return (z === "auto") ? 0 : z;
});
el.style.zIndex = Math.max(...indexes) + 1;
},
// Raise el above all other .panel elements.
bring_panel_to_top(el) {
this.bring_to_top(el, document.querySelectorAll(".panel"));
},
// Hide the panel matching panelsel if it is visible, else show it.
toggle_panel(panelsel) {
const panel = document.querySelector(panelsel);
if (panel.style.display === "block") {
panel.style.display = "none";
}
else {
this.show_panel(panel);
}
},
// Show a panel (an element or a selector string), re-centering it if
// its remembered position has drifted off-screen, and bring it to the
// top. If inputsel is given, focus and select that input.
show_panel(panel, inputsel) {
if (typeof panel === 'string') {
panel = document.querySelector(panel);
}
panel.style.display = "block";
let at_x = panel.offsetLeft, at_y = panel.offsetTop;
if (at_x > window.innerWidth) {
at_x = (window.innerWidth - panel.clientWidth) / 2;
}
if (at_y > window.innerHeight) {
at_y = (window.innerHeight - panel.clientHeight) / 2;
}
position_element(panel, at_x, at_y);
this.bring_panel_to_top(panel);
if (inputsel) {
const inp = document.querySelector(inputsel);
inp.focus();
inp.select();
}
},
// Close-button handler: hide the panel containing the clicked element.
close_panel(ev) {
const panel = ev.target.closest(".panel");
panel.style.display = "none";
},
// mousedown on a .draggable element: begin a drag. Clicks on inputs
// are left alone so the fields stay usable; the active element is
// blurred so the drag doesn't type into a focused input.
draggable_mousedown(ev) {
if (ev.target.matches("input")) {
return;
}
ev.preventDefault();
ev.stopPropagation();
const active = document.activeElement;
if (active) {
active.blur();
}
this.draggable = ev.delegate;
this.draggable.classList.add("dragging");
this.rubstart = {x: ev.clientX, y: ev.clientY};
this.draggable_start = {x: this.draggable.offsetLeft, y: this.draggable.offsetTop};
this.bring_panel_to_top(this.draggable);
// Pin the panel at its current offset so later moves are absolute.
position_element(this.draggable, this.draggable.offsetLeft, this.draggable.offsetTop);
},
// While dragging, move the panel by the mouse's displacement since
// the drag began.
draggable_mousemove(ev) {
if (!this.draggable) {
return;
}
ev.preventDefault();
ev.stopPropagation();
position_element(
this.draggable,
this.draggable_start.x - (this.rubstart.x - ev.clientX),
this.draggable_start.y - (this.rubstart.y - ev.clientY)
);
},
// Finish the drag and clear the drag state.
draggable_mouseup(ev) {
if (!this.draggable) {
return;
}
ev.preventDefault();
ev.stopPropagation();
this.draggable.classList.remove("dragging");
this.draggable = null;
},
}
// From: https://gist.github.com/JustinChristensen/652bedadc92cf0aff86cc5fbcde87732
// <wroathe> You can then do on(document.body, 'pointerdown', e => console.log(e.delegate), '.draggable');
function delegatedTo(sel, fn) {
return ev => {
ev.delegate = ev.target.closest(sel);
if (ev.delegate) {
fn(ev);
}
};
};
function on_event(el, evname, fn, sel) {
    // Attach fn as an evname listener. el may be an element, a selector
    // string, or anything with forEach (e.g. a NodeList). When sel is
    // given, the handler is delegated to descendants matching sel.
    const handler = sel ? delegatedTo(sel, fn) : fn;
    let targets = (typeof el === 'string') ? document.querySelectorAll(el) : el;
    if (!targets.forEach) {
        targets = [targets];
    }
    targets.forEach(t => t.addEventListener(evname, handler));
}
function platform() {
    // Coarse OS detection: "mac", "win", or undefined for anything else.
    const plat = navigator.platform;
    if (plat.includes("Mac")) {
        return "mac";
    }
    if (plat.includes("Win")) {
        return "win";
    }
}
function position_element(elt, x, y) {
elt.style.inset = `${y}px auto auto ${x}px`;
}
function clear_canvas(canvas) {
const ctx = canvas.getContext("2d");
ctx.clearRect(0, 0, canvas.width, canvas.height);
}
function checkers(canvas) {
const ctx = canvas.getContext("2d");
const w = canvas.width, h = canvas.height;
ctx.fillStyle = "#aaaaaa";
ctx.fillRect(0, 0, w, h);
const sqw = 50;
ctx.fillStyle = "#999999";
for (let col = 0; col < (w / sqw); col += 1) {
for (let row = 0; row < (h / sqw); row += 1) {
if ((row + col) % 2) {
ctx.fillRect(col * sqw, row * sqw, sqw, sqw);
}
}
}
}
function main() {
    // Entry point: tag the document for Mac styling, initialize the app,
    // wire up every event handler, and kick off the first paint.
    if (platform() === "mac") {
        document.querySelector("html").classList.add("mac");
    }
    App.init();
    // [target, event name, handler, optional delegation selector]
    const bindings = [
        [document, "mousedown", ev => App.view_mousedown(ev), ".view.overlay"],
        [document, "mousemove", ev => App.view_mousemove(ev)],
        [document, "mouseup", ev => App.view_mouseup(ev)],
        [document, "keydown", ev => App.keydown(ev)],
        [document, "mousedown", ev => Panels.draggable_mousedown(ev), ".draggable"],
        [document, "mousemove", ev => Panels.draggable_mousemove(ev)],
        [document, "mouseup", ev => Panels.draggable_mouseup(ev)],
        [".panel .closebtn", "click", ev => Panels.close_panel(ev)],
        ["#infopanel input", "change", ev => App.spec_change(ev)],
        [window, "resize", ev => App.resize(ev)],
        ["#renderbutton", "click", ev => App.click_render(ev)],
    ];
    for (const [target, evname, fn, sel] of bindings) {
        on_event(target, evname, fn, sel);
    }
    App.view.set_size();
    App.view.paint();
    // Fade the splash screen out after four seconds.
    setTimeout(
        () => document.querySelector("#splash").classList.add("hidden"),
        4000
    );
}
|
Aptus
|
/Aptus-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl/aptus/web/static/aptus.js
|
aptus.js
|
# Project Apicula
Documentation and open source tools for the Gowin FPGA bitstream format.
Project Apicula uses a combination of fuzzing and parsing of the vendor data files to provide Python tools for generating bitstreams.
This project is supported by our generous sponsors. Have a look at our [contributors](https://github.com/YosysHQ/apicula/graphs/contributors) and sponsor them via the various platforms linked on our [github](https://github.com/YosysHQ/apicula).
## Getting Started
Install the latest git [yosys](https://github.com/yosyshq/yosys#setup), [nextpnr-gowin](https://github.com/YosysHQ/nextpnr#nextpnr-gowin), [openFPGALoader](https://github.com/trabucayre/openFPGALoader), and Python 3.8 or higher. [Yowasp](http://yowasp.org/) versions of Yosys and Nextpnr are also supported.
Currently supported boards are
* Trenz TEC0117: GW1NR-UV9QN881C6/I5
* Sipeed Tang Nano: GW1N-LV1QN48C6/I5
* Sipeed Tang Nano 1K: GW1NZ-LV1QN48C6/I5
* Sipeed Tang Nano 4K: GW1NSR-LV4CQN48PC7/I6
* Sipeed Tang Nano 9K: GW1NR-LV9QN88PC6/I5 [^1]
* Seeed RUNBER: GW1N-UV4LQ144C6/I5
* @Disasm honeycomb: GW1NS-UX2CQN48C5/I4
[^1]: `C` devices require passing the `--family` flag as well as `--device` to Nextpnr, and stating the family in place of device when passing `-d` to `gowin_pack` because the C isn't part of the device ID but only present in the date code. Check `examples/Makefile` for the correct command.
Install the tools with pip.
```bash
pip install apycula
```
Note that on some systems the installed binaries might not be on the path. Either add the binaries to the path, or use the path of the _installed binary_ directly. (running the source files will lead to import errors)
```bash
which gowin_bba # check if binaries are on the path
python -m site --user-base # find the site packages base directory
ls $HOME/.local/bin # confirm the binaries are installed in this folder
export PATH="$HOME/.local/bin:$PATH" # add binaries to the path
```
From there, compile a blinky.
The example below is for the Trenz TEC0117. For other devices, use the model numbers listed above for `--device`, and replace `tec0117` with `runber`, `tangnano`, `tangnano4k` or `honeycomb` accordingly. Also note the number of LEDs on your board: 8 for tec0117 and runber, 3 for honeycomb and tangnano.
You can also use the Makefile in the examples folder to build the examples.
```bash
cd examples
yosys -D LEDS_NR=8 -p "read_verilog blinky.v; synth_gowin -json blinky.json"
DEVICE='GW1NR-UV9QN881C6/I5' # change to your device
BOARD='tec0117' # change to your board
nextpnr-gowin --json blinky.json \
--write pnrblinky.json \
--device $DEVICE \
--cst $BOARD.cst
gowin_pack -d $DEVICE -o pack.fs pnrblinky.json # change to your device
# gowin_unpack -d $DEVICE -o unpack.v pack.fs
# yosys -p "read_verilog -lib +/gowin/cells_sim.v; clean -purge; show" unpack.v
openFPGALoader -b $BOARD pack.fs
```
For the Tangnano9k board, you need to call nextpnr and gowin_pack with the chip family as follows:
```
nextpnr-gowin --json blinky.json \
--write pnrblinky.json \
--device $DEVICE \
--family GW1N-9C \
--cst $BOARD.cst
gowin_pack -d GW1N-9C -o pack.fs pnrblinky.json
```
## Getting started for contributors
In addition to the above, to run the fuzzers and build the ChipDB, the following additional dependencies are needed.
Version 1.9.8 of the Gowin vendor tools. Newer versions may work, but have not been tested.
Alternatively, you can use the `Dockerfile` to run the fuzzers in a container.
To run the fuzzers, do the following in a checkout of this repo
```bash
pip install -e .
export GOWINHOME=/gowin/installation
make
```
## Resources
Check out the `doc` folder for documentation about the FPGA architecture, vendor file structure, and bitstream structure.
My internship report about this project [can be downloaded here](https://github.com/pepijndevos/internshipreport).
My presentations at [FPT2020](https://www.youtube.com/watch?v=kyQLtBh6h0U) and [RC3](https://media.ccc.de/v/rc3-739325-how_to_fuzz_an_fpga_my_experience_documenting_gowin_fpgas).
I did a few [livestreams on twitch](https://www.twitch.tv/pepijnthefox) working on this project, which are collected [on this playlist](https://www.youtube.com/playlist?list=PLIYslVBAlKZad3tjr5Y4gqBV3QKQ5_tPw) I've also started to write Jupyter notebooks of my explorations that are more condensed than a video.
You can also come chat on [Matrix](https://matrix.to/#/#apicula:matrix.org) or [IRC](https://web.libera.chat/#yosys-apicula)
## What remains to be done / how can I help?
There is a lot of work left to do before this is a mature and complete FPGA flow.
The upside is that there is something for people from all skill levels and backgrounds.
### Fuzzing
This project partially relies on the data files provided by the vendor to work.
However, the exact meaning of these files is often not completely understood.
Fuzzing can be used to discover the meaning of the vendor files.
`tiled_fuzzer.py` is a fuzzer that uses vendor files to find bits in a specific tile type. Adding code for a new primitive or tile type is relatively easy. All that is needed is a function that uses `codegen.py` to generate the primitive of interest, which has to be added to the `fuzzers` list. Then the output at the bottom of the script can be adjusted to your needs.
There is a `fuse_h4x.parse_tile` function which uses our understanding of the vendor files to look for matching items. On the other hand `fuse_h4x.scan_fuses` will just give you a list of fuses that were set in the tile, and `fuse_h4x.scan_tables` will go through *all* vendor data tables and spit out even a partial match. The latter will give false positives, but is helpful when discovering new tables.
`fuzzer.py` is a bit more complex to write new fuzzers for, but could be useful in some cases. It is for example much more efficient in fuzzing array parameters such as LUT bits, BRAM contents, and PLL settings. Have a look at `Lut4BitsFuzzer` for ideas about how to fuzz BRAM and DRAM for example.
Things that could be fuzzed:
* DRAM modes and bits
* BRAM modes and bits
* IO logic (LVDS etc.), expected to be complex.
* PLL settings
### Parsing
For each FPGA, the vendor provides `.dat`, `.fse`, `.ini`, `.pwr`, and `.tm` files. Of these, only parsers for `.dat`, `.fse` and `.tm` have been written.
The format of these other files is unknown, you're on your own here. I could only offer you some vague pointers based on experience from the other two files.
For a description of the known file formats, [see the documentation](doc/filestructure.md).
The parser for the `.fse` format is fairly robust and complete, but vendor software updates sometimes add new file and table types.
The main thing lacking here is a better understanding of the meaning of all these tables. Part of this can be done with [fuzzing](#fuzzing), but another large part is just looking at the data for patterns and correlations. For example, some numbers might be indices into other tables, wire IDs, fuse IDs, or encoded X/Y positions.
The parser for the `.dat` file is more fragile and incomplete. This is mainly because it just appears to be a fixed format struct with array fields. New vendor software versions sometimes add new fields, breaking the parser. Here there are actually a few gaps in the data that have not been decoded and named. It is suspected that at least some of these gaps are related to pinouts and packaging.
The format of the '.tm' appears to be just a big collection of floats. Not all of them have a meaning that is well understood, but the parser itself is fairly complete.
### Refactoring
There are quite a few sketchy places in the code that could use some tender loving care, without taking a deep dive into FPGA documenting.
The `.dat` parser was sort of patched to output a JSON file, but it would be a lot nicer if one could just import it as a library and get Python datastructures back directly. Both parsers could optionally be extended to map known IDs to more human readable values (`wirenames.py` for example), provide a more convenient structure, and chomp of padding values.
The fuzzers should be extended so that they run against all FPGA types. This is important to detect differences between FPGAs and generate ChipDBs for all of them. This does not require much in-depth knowledge. Just adding parameters for all FPGA types. A bit more involved is extending the fuzzer to fuzz global settings and constraints, these would need to be assigned config bits and toggle them accordingly.
This project was funded through the <a href="/PET">NGI0 PET</a> Fund, a fund established by <a href="https://nlnet.nl">NLnet</a> with financial support from the European Commission's <a href="https://ngi.eu">Next Generation Internet</a> programme, under the aegis of DG Communications Networks, Content and Technology under grant agreement N<sup>o</sup> 825310.
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/readme.md
|
readme.md
|
```
import tiled_fuzzer
import codegen
import pindef
import bslib
import chipdb
import fuse_h4x
import gowin_unpack
from wirenames import wirenames
from PIL import Image
import numpy as np
import pickle
def dff(mod, cst, row, col, clk=None):
    """Place a DFF primitive at R{row}C{col} and return its CLK wire name.

    When clk is falsy, a fresh per-instance clock wire is created.
    """
    name = tiled_fuzzer.make_name("DFF", "DFF")
    prim = codegen.Primitive("DFF", name)
    clk_wire = clk if clk else name + "_CLK"
    prim.portmap['CLK'] = clk_wire
    prim.portmap['D'] = name + "_D"
    prim.portmap['Q'] = name + "_Q"
    mod.wires.update(prim.portmap.values())
    mod.primitives[name] = prim
    cst.cells[name] = f"R{row}C{col}"
    return clk_wire
def ibuf(mod, cst, loc, clk=None):
    """Place an IBUF at pin location ``loc`` and return its output wire name.

    When clk is falsy, a fresh per-instance output wire is created.
    """
    name = tiled_fuzzer.make_name("IOB", "IBUF")
    prim = codegen.Primitive("IBUF", name)
    out_wire = clk if clk else name + "_O"
    prim.portmap["I"] = name + "_I"
    prim.portmap["O"] = out_wire
    mod.wires.update([out_wire])
    mod.inputs.update([prim.portmap["I"]])
    mod.primitives[name] = prim
    cst.ports[name] = loc
    return out_wire
with open(f"{tiled_fuzzer.gowinhome}/IDE/share/device/{tiled_fuzzer.device}/{tiled_fuzzer.device}.fse", 'rb') as f:
fse = fuse_h4x.readFse(f)
with open(f"{tiled_fuzzer.device}.pickle", 'rb') as f:
db = pickle.load(f)
dff_locs = [(2, 18), (2, 5), (10, 5), (10, 18), (10, 18)]
clock_pins = pindef.get_clock_locs("GW1N-1", "QN48", header=1)
# pins appear to be differential with T/C denoting true/complementary
true_pins = [p[0] for p in clock_pins if "GCLKT" in p[1]]
true_pins
mod = codegen.Module()
cst = codegen.Constraints()
ibufs = [ibuf(mod, cst, p) for p in true_pins]
dffs = [dff(mod, cst, row, col) for row, col in dff_locs]
bs, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
bslib.display(None, bs)
mod.assigns = zip(dffs, ibufs)
bs_clk, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
diff = bs^bs_clk
tiles = fuse_h4x.tile_bitmap(fse, diff)
bslib.display(None, diff)
tiles.keys()
for loc, tile in tiles.items():
im = bslib.display(None, tile)
im_scaled = im.resize((im.width*4, im.height*4), Image.NEAREST)
if (loc[0]+1, loc[1]+1) in dff_locs:
print('dff')
print(loc)
display(im_scaled)
```
At this point several things are notable.
* In the DFF that were placed, a few bits in the routing MUXes changed, 99% sure that's just the connection to `GBxx`, which warrants confirmation.
* **No bits in the IOB changed**. This suggests they are by default connected to the clock tree? What if you use the complementary pin?
* A bunch of bits in the center tile changed, this is probably where the meat of the clock routing happens.
* In the center row there are a few tiles with 2 bits changed, most likely connecting a horizontal "spine" from the center tile to a vertical "tap".
* In several tiles there is one bit, most likely connecting the "tap" to a horizontal "branch".
This all has to be further explored.
It can be seen that the tiles in which the "spine" is connected to the "taps" are of a distinct type, namely type 13 and 14.

Meanwhile, looking at the bits within a tile of type 12, it can be seen that the single dots correspond with the bottom right cluster, while the bits of type 13 and 14 are not present. There is an empty column there.

Let's see if tile 13 and 14 indeed have something interesting at these bits.
```
tile = fuse_h4x.render_tile(fse, 14)
im = fuse_h4x.display(None, tile)
im.resize((im.width*8, im.height*8), Image.NEAREST) # THEY DO!
# Lets parse the tile to confirm the MUX
bm = chipdb.tile_bitmap(db, bs_clk)
interesting = {loc[0:2] for loc in tiles.keys()}
for idx, t in bm.items():
if idx in interesting:
row, col = idx
dbtile = db.grid[row][col]
bels, pips = gowin_unpack.parse_tile_(dbtile, t)
print(repr(idx) +
(", DFF0" if 'DFF0' in bels else "") +
(", DFF1" if 'DFF1' in bels else "") +
(", CLK0: " + pips['CLK0'] if pips['CLK0'] != "VCC" else "") +
(", CLK1: " + pips['CLK1'] if pips['CLK1'] != "VCC" else "") +
(", GBO0: " + pips.get('GBO0') if pips.get('GBO0') else "") +
(", GBO1: " + pips.get('GBO1') if pips.get('GBO1') else "")
)
```
So this confirms that `CLK0` is indeed connected to `GBxx`, and in fact 4 different `GBxx`.
It also shows that we're already decoding the `GBO0` to `GT00` connection, which I'm 99% sure corresponds to the bottom right bit.
What it all means? Nobody knows. Okay, maybe some god and a few Gowin engineers...
The fact that there is only one `GBO0` to `GT00` connection would suggest it routes all `GBxx` and then selects one locally.
But what's weird is that it seems to route each via a separate tap, while the DFFs are in the same column. Would it then not make more sense to enable one tap for both?
Maybe this is to limit clock skew?
Next step is to dig in the fuse file and confirm the `GBO0` to `GT00` connection and inspect the top right bits in tiles 13 and 14.
```
spine_tile = tiles[(6, 4, 14)]
#fs = fuse_h4x.scan_fuses(fse, 14, spine_tile)
loc = list(zip(*np.where(spine_tile==1)))
print(loc)
#fuses = fuse_h4x.scan_tables(fse, 14, fs)
#parsed = fuse_h4x.parse_tile(fse, 14, spine_tile)
# stolen from chipdb.py
def fse_pips(fse, ttyp, table):
    """Decode wire table ``table`` of tile type ``ttyp``.

    Returns {dest_wire: {src_wire: fuse_set}}. A negative source id marks
    a default (fuse-less) connection. Ids above 1000 occasionally appear;
    their meaning is unknown — TODO confirm.
    """
    pips = {}
    for srcid, destid, *raw in fse[ttyp]['wire'][table]:
        fuses = {fuse_h4x.fuse_lookup(fse, ttyp, f) for f in chipdb.unpad(raw)}
        if srcid < 0:
            # Default route: no fuses needed, flip the id back positive.
            fuses = set()
            srcid = -srcid
        pips.setdefault(wirenames[destid], {})[wirenames[srcid]] = fuses
    return pips
fse_pips(fse, 13, 38)
```
That is... somewhat confusing. But hey, it seems that wire table 38 contains the fuses for the top right bits, which drive `GT00`.
It's worth noting the source wire names might not be accurate, this is after all a completely different wire table.
Normal routing is in table 2.
Note also from `wirenames` that there are just two `GBO0`/`GBO1` and two `GT00`/`GT10`, while there are 8 `GBxx`. (note `O`/`0`).
Does this match with primary/secondary naming in the timing info? Seems plausible.
So okay, I can get into this. *something* drives the vertical `GT00` wires, which drive the horizontal `GBO0` wires, which drive the `GBxx` inputs at the DFF. I think it's time to look at these big and scary center tiles.
```
print("Wire tables:", list(fse[82]['wire'].keys()), list(fse[83]['wire'].keys())) # table 38 again!
print("Tile 82 destinations:", list(fse_pips(fse, 82, 38).keys()))
print("Tile 14 source: ", [list(k.keys()) for k in fse_pips(fse, 14, 38).values()])
print("Tile 83 destinations:", list(fse_pips(fse, 83, 38).keys()))
print("Tile 13 source: ", [list(k.keys()) for k in fse_pips(fse, 13, 38).values()])
```
This is interesting, it appears that despite the awkward names, most of the routes actually match. The puzzling part is the sources of the center tiles.
Somehow these have to match up with dedicated clock pins and other clock sources.
If the taps and branches indeed switch all 8 global clocks at once, there should be a bunch of different muxes in there.
How is this encoded in the fuse file? No idea. More poking around needed...
```
tile = fuse_h4x.render_tile(fse, 83)
im = fuse_h4x.display(None, tile)
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
im = bslib.display(None, tiles[(5, 10, 83)][-2:,:])
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
tile = fuse_h4x.render_tile(fse, 82)
im = fuse_h4x.display(None, tile)
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
im = bslib.display(None, tiles[(5, 9, 82)][-2:,:])
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
# just to verify that the tile itself rather than the diff doesn't have more interesting stuff.
# it does not. A few const bits that are always there.
tiles_clk = fuse_h4x.tile_bitmap(fse, bs_clk)
im = bslib.display(None, tiles_clk[(5, 9, 82)])
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
im = bslib.display(None, tiles_clk[(5, 10, 83)])
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
p = fse_pips(fse, 14, 2)
p['GBO0'], p['GBO1'] # there is only one option here
# lets do some dumb fuzzing!
from multiprocessing.dummy import Pool
mod = codegen.Module()
cst = codegen.Constraints()
ibuf(mod, cst, true_pins[2], clk="myclk")
base_bs, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
modules = []
constrs = []
for i in range(2, 20):
mod = codegen.Module()
cst = codegen.Constraints()
ibuf(mod, cst, true_pins[0], clk="myclk")
dff(mod, cst, 8, i, clk="myclk")
modules.append(mod)
constrs.append(cst)
p = Pool()
pnr_res = p.map(lambda param: tiled_fuzzer.run_pnr(*param, {}), zip(modules, constrs))
for sweep_bs, *rest in pnr_res:
sweep_tiles = fuse_h4x.tile_bitmap(fse, sweep_bs^base_bs)
print("#"*80)
for loc, tile in sweep_tiles.items():
row, col, ttyp = loc
dbtile = db.grid[row][col]
bels, pips = gowin_unpack.parse_tile_(dbtile, tile)
if ttyp == 12:
print("branch", col)
pass
elif ttyp in {13, 14}:
print("tap", col)
elif ttyp in {82, 83}:
im = bslib.display(None, tile[-2:,:])
im_scaled = im.resize((im.width*4, im.height*4), Image.NEAREST)
display(im_scaled)
# that's... not what I expected. I changed a few things but it seems pretty consistent.
# lemme try changing between clock inputs to see if ANYTHING changes...
mod = codegen.Module()
cst = codegen.Constraints()
clks = [ibuf(mod, cst, p) for p in true_pins]
flop = dff(mod, cst, 2, 2)
bs_base, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
clk_bss = []
for clk in clks:
mod.assigns = [(flop, clk)]
bs_clkn, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
clk_bss.append(bs_clkn)
for mybs in clk_bss:
clk_tiles = fuse_h4x.tile_bitmap(fse, mybs^bs_base)
im = bslib.display(None, clk_tiles[(5, 9, 82)][-2:,:])
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
tile = fuse_h4x.render_tile(fse, 82)
im = fuse_h4x.display(None, tile[-2:,:])
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
```
Fascinating... so if I sweep a DFF along all the columns, it basically takes the clock from taps 1, 5, 13, 17. The only change in the center mux is between left/right.
When instead I sweep the clock pin used, it only changes a few bits in the center mux. So I'm kinda assuming that particular mux is hardwired to these two taps on each side, and other muxes drive other taps along the spine.
Since I don't have a trivial way to make it use any other mux voluntarily, fuzzing this will be a big PITA. It probably pays off to look at the vendor data more deeply.
I wonder what happens if I fill a whole row with DFF. Many rows? The whole FPGA? What if I fill a whole row with each different clock pin, and then cycle the pins around? Time to find out.
```
mod = codegen.Module()
cst = codegen.Constraints()
clks = [ibuf(mod, cst, p) for p in true_pins]
for i in range(2, 7):
for j in range(2, 20):
if i < 6:
flop = dff(mod, cst, i, j)
else:
flop = dff(mod, cst, 2+i, j)
bs_base, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
mod = codegen.Module()
cst = codegen.Constraints()
clks = [ibuf(mod, cst, p) for p in true_pins]
for i in range(2, 7):
for j in range(2, 20):
if i < 6:
flop = dff(mod, cst, i, j)
else:
flop = dff(mod, cst, 2+i, j)
mod.assigns.append((flop, clks[0]))
bs_oneclk, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
mod = codegen.Module()
cst = codegen.Constraints()
clks = [ibuf(mod, cst, p) for p in true_pins]
for i, clk in enumerate(clks):
for j in range(2, 20):
if i < 4:
flop = dff(mod, cst, 2+i, j)
else:
flop = dff(mod, cst, 4+i, j)
mod.assigns.append((flop, clk))
bs_allclks, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
display(bslib.display(None, bs_oneclk^bs_base))
display(bslib.display(None, bs_allclks^bs_base))
```
At this point I kinda understand the single-clock case well enough. The two inner muxes select a clock source, and route it to columns 1, 5, 13, 17. From there two fuses enable the tap, and at each row the single fuse enables the branch for the cells around it. A bit up we saw that columns 1-3 are controlled from tap 1, 4-9 from tap 5, 10-15 from tap 13, and 16-18 from tap 17.
For the case of multiple clocks, we can see that all GT00 (primary?) spines are enabled, and the 5th pin is connected to GT10. This lines up with the one-clock case. Would it be the case that GB00-GB30 are GT00 and GB40-GB70 are GT10? It seems like that so far. What if I do 4 clocks, and then change one pin at a time and see what changes?
```
mod = codegen.Module()
cst = codegen.Constraints()
clks = [ibuf(mod, cst, p) for p in true_pins]
for i in range(2, 6):
for j in range(2, 20):
flop = dff(mod, cst, i, j)
bs_base, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
clk_sweep_bs = []
for k in range(5):
mod = codegen.Module()
cst = codegen.Constraints()
clks = [ibuf(mod, cst, p) for p in true_pins]
for i, clk in enumerate(clks[:-1]):
for j in range(2, 20):
flop = dff(mod, cst, 2+i, j)
if i == k:
mod.assigns.append((flop, clks[-1]))
else:
mod.assigns.append((flop, clk))
bs_tmp, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
clk_sweep_bs.append(bs_tmp)
for mybs in clk_sweep_bs:
display(bslib.display(None, mybs^bs_base))
tile = fuse_h4x.render_tile(fse, 82)
im = fuse_h4x.display(None, tile[-2:,:])
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
for mybs in clk_sweep_bs[:-1]:
clk_tiles = fuse_h4x.tile_bitmap(fse, mybs^clk_sweep_bs[-1])
ctile = clk_tiles[(5, 9, 82)].astype('uint8')
im = fuse_h4x.display(None, (tile*ctile)[-2:,:])
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
```
So hey, when you swap a clock it keeps the routing the same, but just changes one mux. This makes sense. Now what I want to know is which columns correspond to which mux. Let's take it row by row, and see if we can tell that way.
```
mod = codegen.Module()
cst = codegen.Constraints()
clks = [ibuf(mod, cst, p) for p in true_pins]
for i in range(2, 6):
for j in range(2, 20):
flop = dff(mod, cst, i, j)
bs_base, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
clk_march_bs = []
for k in range(4):
mod = codegen.Module()
cst = codegen.Constraints()
clks = [ibuf(mod, cst, p) for p in true_pins]
for i, clk in enumerate(clks[:-1]):
for j in range(2, 20):
flop = dff(mod, cst, 2+i, j)
if i <= k:
mod.assigns.append((flop, clk))
bs_tmp, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
clk_march_bs.append(bs_tmp)
base_tmp = bs_base
for mybs in clk_march_bs:
display(bslib.display(None, mybs^base_tmp))
base_tmp = mybs
```
Alriiiiight!! So as before, we got 1, 5, 13, 17 for the first mux. Then the second mux goes 0, 4, 8, 12, 16. Third 3, 7, 11, 15. And fourth 2, 6, 10, 14, 18.
```
tile = fuse_h4x.render_tile(fse, 82)
im = fuse_h4x.display(None, tile[-2:,:])
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
base_tmp = bs_base
for mybs in clk_march_bs:
clk_tiles = fuse_h4x.tile_bitmap(fse, mybs^base_tmp)
ctile = clk_tiles[(5, 9, 82)].astype('uint8')
im = fuse_h4x.display(None, (tile*ctile)[-2:,:])
display(im.resize((im.width*8, im.height*8), Image.NEAREST))
base_tmp = mybs
mt = fuse_h4x.tile_bitmap(fse, clk_march_bs[-1])
interesting = {(1,1): mt[(1, 1, 12)], (2,1): mt[(2, 1, 12)], (3,1): mt[(3, 1, 12)], (4, 1): mt[(4, 1, 12)]}
for idx, t in interesting.items():
row, col = idx
dbtile = db.grid[row][col]
bels, pips = gowin_unpack.parse_tile_(dbtile, t)
print(idx, pips['CLK0'])
```
This seems to confirm my earlier findings. I think the main thing that needs to be done is find the boundaries of each branch. Maybe I can use one full row, and then do a sweep on the next row/clock to see which tap it connects to. I just hope it doesn't randomly alternate which clock goes where.
Then I need to find some more clock sources to do it all again for the secondary clocks. And then I think it may be time to write the real fuzzer???
```
mod = codegen.Module()
cst = codegen.Constraints()
clks = [ibuf(mod, cst, p) for p in true_pins]
for i in range(2, 6):
for j in range(2, 20):
flop = dff(mod, cst, i, j)
bs_base, hdr, ftr, posp, config = tiled_fuzzer.run_pnr(mod, cst, {})
modules = []
constrs = []
for k in range(4):
for col in range(2, 20):
mod = codegen.Module()
cst = codegen.Constraints()
clks = [ibuf(mod, cst, p) for p in true_pins]
for i, clk in enumerate(clks[:-1]):
for j in range(2, 20):
flop = dff(mod, cst, 2+i, j)
if i < k:
mod.assigns.append((flop, clk))
elif i == k and j == col:
mod.assigns.append((flop, clk))
modules.append(mod)
constrs.append(cst)
p = Pool()
pnr_res = p.map(lambda param: tiled_fuzzer.run_pnr(*param, {}), zip(modules, constrs))
clks = {}
complete_taps = set()
for idx, (sweep_bs, *rest) in enumerate(pnr_res):
#display(bslib.display(None, sweep_bs^bs_base))
sweep_tiles = fuse_h4x.tile_bitmap(fse, sweep_bs^bs_base)
dff = set()
tap = None
gclk = idx//18
if idx and idx%18==0:
complete_taps.update(clks[gclk-1].keys())
#print("#"*80)
for loc, tile in sweep_tiles.items():
row, col, ttyp = loc
#print(row, idx//18)
#print(ttyp)
if row <= gclk: continue
if ttyp == 12:
#print("branch", col)
dff.add(col)
elif ttyp in {64, 13, 14}:
#print("tap", col)
if col not in complete_taps:
tap = col
clks.setdefault(gclk, {}).setdefault(tap, set()).update(dff)
#print(complete_taps, clks)
clks
```
I think this is a pretty good place to switch to the real fuzzer. At this point it's clear which center muxes drive which spines/taps/branches, and how it all connects. The only thing that's missing is the secondary clocks, which is just more of the same. I just need to find some more clock inputs, maybe the PLL outputs?
Here is a diagram of my current understanding. The muxes in the center connect a clock pin to the left or right spines. Each specific spine has 2-3 taps per side, which route it up and down. At each row this tap can then be connected to a branch that spans several columns. The different colours in this diagram represent GB00-GB40 at the slice level.

|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/clock_experiments.ipynb
|
clock_experiments.ipynb
|
import re
import os
import sys
import tempfile
import subprocess
from collections import deque, Counter, namedtuple
from itertools import chain, count, zip_longest
from functools import reduce
from random import shuffle, seed
from warnings import warn
from math import factorial
import numpy as np
from multiprocessing.dummy import Pool
import pickle
import json
from shutil import copytree
from apycula import codegen
from apycula import bslib
from apycula import pindef
from apycula import fuse_h4x
from apycula.wirenames import wirenames, clknames, wirenumbers, clknumbers
#TODO proper API
#from apycula import dat19_h4x
from apycula import tm_h4x
from apycula import chipdb
from apycula import attrids
# Location of the vendor IDE installation; required for all tool runs.
gowinhome = os.getenv("GOWINHOME")
if not gowinhome:
    raise Exception("GOWINHOME not set")
# device = os.getenv("DEVICE")
device = sys.argv[1]
# Per-family parameters handed to the vendor PnR: test package, device
# name and full part number used for the fuzzing runs.
params = {
    "GW1NS-2": {
        "package": "LQFP144",
        "device": "GW1NS-2C",
        "partnumber": "GW1NS-UX2CLQ144C5/I4",
    },
    "GW1NS-4": {
        "package": "QFN48",
        "device": "GW1NSR-4C",
        "partnumber": "GW1NSR-LV4CQN48PC7/I6",
    },
    "GW1N-9": {
        "package": "PBGA256",
        "device": "GW1N-9",
        "partnumber": "GW1N-LV9PG256C6/I5",
    },
    "GW1N-9C": {
        "package": "UBGA332",
        "device": "GW1N-9C",
        "partnumber": "GW1N-LV9UG332C6/I5",
    },
    "GW1N-4": {
        "package": "PBGA256",
        "device": "GW1N-4",
        "partnumber": "GW1N-LV4PG256C6/I5",
    },
    "GW1N-1": {
        "package": "LQFP144",
        "device": "GW1N-1",
        "partnumber": "GW1N-LV1LQ144C6/I5",
    },
    "GW1NZ-1": {
        "package": "QFN48",
        "device": "GW1NZ-1",
        "partnumber": "GW1NZ-LV1QN48C6/I5",
    },
    "GW2A-18": {
        "package": "PBGA256",
        "device": "GW2A-18",
        "partnumber": "GW2A-LV18PG256C8/I7",
    },
    "GW2A-18C": {
        "package": "PBGA256S",
        "device": "GW2AR-18C",
        "partnumber": "GW2AR-LV18PG256SC8/I7",
    },
}[device]
# utils
# Running counter; gives every generated instance a unique suffix.
name_idx = 0
def make_name(bel, typ):
    """Return a fresh, unique Verilog instance name for a bel/type pair."""
    global name_idx
    name_idx = name_idx + 1
    return "inst{}_{}_{}".format(name_idx, bel, typ)
def tbrl2rc(fse, side, num):
    """Convert an edge-relative IO location to an absolute grid coordinate.

    side is one of 'T', 'B', 'L', 'R' (top/bottom/left/right edge of the
    chip) and num is the 1-based position along that edge.  Grid
    dimensions come from the tile-type grid fse['header']['grid'][61].

    Returns (row, col).  Raises ValueError for an unknown side letter
    (the original silently fell through and died with UnboundLocalError).
    """
    grid = fse['header']['grid'][61]
    if side == 'T':
        row = 0
        col = int(num) - 1
    elif side == 'B':
        row = len(grid) - 1
        col = int(num) - 1
    elif side == 'L':
        row = int(num) - 1
        col = 0
    elif side == 'R':
        row = int(num) - 1
        col = len(grid[0]) - 1
    else:
        raise ValueError(f"Unknown IO side {side!r}")
    return (row, col)
# Read the packer vendor log to identify problems with primitives/attributes.
# Returns dictionary {(instance name, error code) : [full error text, ...]}
# NOTE: raw string literal -- the original used "\w" inside a plain string,
# which is an invalid escape sequence (SyntaxWarning on modern Python).
_err_parser = re.compile(r"(\w+) +\(([\w\d]+)\).*'(inst[^\']+)\'.*")
def read_err_log(fname):
    """Parse a vendor PnR log file.

    Collects every Warning/Error line that mentions a generated instance
    (names produced by make_name start with 'inst') and groups the full
    matched message text by (instance name, error code).
    """
    errs = {}
    with open(fname, 'r') as f:
        for line in f:
            res = _err_parser.match(line)
            if res:
                line_type, code, name = res.groups()
                text = res.group(0)
                # only diagnostics are interesting; skip Info etc.
                if line_type in ["Warning", "Error"]:
                    errs.setdefault((name, code), []).append(text)
    return errs
# Result of the vendor router-packer run
PnrResult = namedtuple('PnrResult', [
    'bitmap',   # numpy bit matrix of the produced bitstream frames
    'hdr',      # raw bitstream header command lines
    'ftr',      # raw bitstream footer command lines
    'constrs',  # constraints
    'config',   # device config
    'attrs',    # port attributes
    'errs'      # parsed log file
    ])
def run_pnr(mod, constr, config):
    """Run the vendor place-and-route tool on a generated design.

    mod    -- codegen.Module with the netlist to implement
    constr -- codegen.Constraints for it
    config -- dict of device-config overrides ('jtag', 'sspi', ... -> "0"/"1");
              missing keys default to "1" (pin usable as GPIO)

    Writes top.v/top.cst/run.tcl into a temporary directory, invokes
    gw_sh, and returns a PnrResult built from the produced bitstream and
    log.  If no bitstream appears, prints the temp dir, waits for input
    (so the directory can be inspected) and returns None.
    """
    cfg = codegen.DeviceConfig({
        "use_jtag_as_gpio" : config.get('jtag', "1"),
        "use_sspi_as_gpio" : config.get('sspi', "1"),
        "use_mspi_as_gpio" : config.get('mspi', "1"),
        "use_ready_as_gpio" : config.get('ready', "1"),
        "use_done_as_gpio" : config.get('done', "1"),
        "use_reconfign_as_gpio" : config.get('reconfig', "1"),
        "use_mode_as_gpio" : config.get('mode', "1"),
        "use_i2c_as_gpio" : config.get('i2c', "1"),
        "bit_crc_check" : "1",
        "bit_compress" : "0",
        "bit_encrypt" : "0",
        "bit_security" : "1",
        "bit_incl_bsram_init" : "0",
        "loading_rate" : "250/100",
        "spi_flash_addr" : "0x00FFF000",
        "bit_format" : "txt",
        "bg_programming" : "off",
        "secure_mode" : "0"})
    opt = codegen.PnrOptions({
        "gen_posp" : "1",
        "gen_io_cst" : "1",
        "gen_ibis" : "1",
        "ireg_in_iob" : "0",
        "oreg_in_iob" : "0",
        "ioreg_in_iob" : "0",
        "timing_driven" : "0",
        "cst_warn_to_error" : "0"})
    #"show_all_warn" : "1",
    pnr = codegen.Pnr()
    pnr.device = params['device']
    pnr.partnumber = params['partnumber']
    pnr.opt = opt
    pnr.cfg = cfg
    with tempfile.TemporaryDirectory() as tmpdir:
        with open(tmpdir+"/top.v", "w") as f:
            mod.write(f)
        pnr.netlist = tmpdir+"/top.v"
        with open(tmpdir+"/top.cst", "w") as f:
            constr.write(f)
        pnr.cst = tmpdir+"/top.cst"
        with open(tmpdir+"/run.tcl", "w") as f:
            pnr.write(f)
        subprocess.run([gowinhome + "/IDE/bin/gw_sh", tmpdir+"/run.tcl"], cwd = tmpdir)
        #print(tmpdir); input()
        try:
            return PnrResult(
                *bslib.read_bitstream(tmpdir+"/impl/pnr/top.fs"),
                constr,
                config, constr.attrs,
                read_err_log(tmpdir+"/impl/pnr/top.log"))
        except FileNotFoundError:
            # tool failed: keep the temp dir alive until the user looks at it
            print(tmpdir)
            input()
            return None
# Matches an edge pin location like "IOT14" -> side letter + number.
_tbrlre = re.compile(r"IO([TBRL])(\d+)")
def fse_iob(fse, db, pin_locations, diff_cap_info, locations):
    """Create IOB bels for every IO tile type and attach them to the grid.

    pin_locations -- {tile type: {pin location: set of full pin names}}
    diff_cap_info -- {pin name: (is_diff, is_true_lvds, is_positive)}
    locations     -- {tile type: [(row, col), ...]} for all grid tiles
    """
    iob_bels = {}
    is_true_lvds = False
    is_positive = False
    for ttyp, tiles in pin_locations.items():
        # tiles are unique, so one is enough but we need A&B pins
        for tile, bels in tiles.items():
            if len(bels) >= 2:
                break
        # NOTE(review): `tile`/`bels` deliberately leak out of the loop
        # above; if no tile has >= 2 pins the last one examined is used.
        # create all IO bels
        is_simplified = len(bels) > 2
        side, num = _tbrlre.match(tile).groups()
        row, col = tbrl2rc(fse, side, num)
        for bel_name in bels:
            is_diff = False
            if bel_name in diff_cap_info.keys():
                is_diff, is_true_lvds, is_positive = diff_cap_info[bel_name]
            # bel name is IOBA/IOBB/... -- keyed by the pin's trailing letter
            bel = iob_bels.setdefault(ttyp, {}).setdefault(f'IOB{bel_name[-1]}', chipdb.Bel())
            bel.simplified_iob = is_simplified
            bel.is_diff = is_diff
            bel.is_true_lvds = is_true_lvds
            bel.is_diff_p = is_positive
            print(f"type:{ttyp} [{row}][{col}], IOB{bel_name[-1]}, diff:{is_diff}, true lvds:{is_true_lvds}, p:{is_positive}")
    # replicate the per-type bels into every tile of that type
    for ttyp, bels in iob_bels.items():
        for row, col in locations[ttyp]:
            db.grid[row][col].bels.update(iob_bels[ttyp])
if __name__ == "__main__":
    # Load the vendor fuse table, pin JSON and timing database.
    with open(f"{gowinhome}/IDE/share/device/{device}/{device}.fse", 'rb') as f:
        fse = fuse_h4x.readFse(f)
    with open(f"{device}.json") as f:
        dat = json.load(f)
    with open(f"{gowinhome}/IDE/share/device/{device}/{device}.tm", 'rb') as f:
        tm = tm_h4x.read_tm(f, device)
    db = chipdb.from_fse(device, fse, dat)
    chipdb.set_banks(fse, db)
    db.timing = tm
    db.packages, db.pinout, db.pin_bank = chipdb.json_pinout(device)
    # Tile types at the four grid corners; grid 61 is the tile-type map.
    corners = [
        (0, 0, fse['header']['grid'][61][0][0]),
        (0, db.cols-1, fse['header']['grid'][61][0][-1]),
        (db.rows-1, db.cols-1, fse['header']['grid'][61][-1][-1]),
        (db.rows-1, 0, fse['header']['grid'][61][-1][0]),
    ]
    # All (row, col) positions grouped by tile type.
    locations = {}
    for row, row_dat in enumerate(fse['header']['grid'][61]):
        for col, typ in enumerate(row_dat):
            locations.setdefault(typ, []).append((row, col))
    pin_names = pindef.get_locs(device, params['package'], True)
    # Tile types along each chip edge, indexed by position.
    edges = {'T': fse['header']['grid'][61][0],
             'B': fse['header']['grid'][61][-1],
             'L': [row[0] for row in fse['header']['grid'][61]],
             'R': [row[-1] for row in fse['header']['grid'][61]]}
    # Group package pins by the tile type of the tile they live in.
    pin_locations = {}
    pin_re = re.compile(r"IO([TBRL])(\d+)([A-Z])")
    for name in pin_names:
        side, num, pin = pin_re.match(name).groups()
        ttyp = edges[side][int(num)-1]
        ttyp_pins = pin_locations.setdefault(ttyp, {})
        ttyp_pins.setdefault(name[:-1], set()).add(name)
    # Empty design run yields the baseline bitstream, header and footer.
    pnr_empty = run_pnr(codegen.Module(), codegen.Constraints(), {})
    db.cmd_hdr = pnr_empty.hdr
    db.cmd_ftr = pnr_empty.ftr
    db.template = pnr_empty.bitmap
    # IOB
    diff_cap_info = pindef.get_diff_cap_info(device, params['package'], True)
    fse_iob(fse, db, pin_locations, diff_cap_info, locations);
    chipdb.dat_portmap(dat, db, device)
    # XXX GW1NR-9 has interesting IOBA pins on the bottom side
    if device == 'GW1N-9' :
        loc = locations[52][0]
        bel = db.grid[loc[0]][loc[1]].bels['IOBA']
        bel.portmap['GW9_ALWAYS_LOW0'] = wirenames[dat[f'IologicAIn'][40]]
        bel.portmap['GW9_ALWAYS_LOW1'] = wirenames[dat[f'IologicAIn'][42]]
    chipdb.dat_aliases(dat, db)
    # GSR
    if device in {'GW2A-18', 'GW2A-18C'}:
        db.grid[27][50].bels.setdefault('GSR', chipdb.Bel()).portmap['GSRI'] = 'C4';
    elif device in {'GW1N-1', 'GW1N-4', 'GW1NS-4', 'GW1N-9', 'GW1N-9C', 'GW1NS-2', 'GW1NZ-1'}:
        db.grid[0][0].bels.setdefault('GSR', chipdb.Bel()).portmap['GSRI'] = 'C4';
    else:
        raise Exception(f"No GSR for {device}")
    #TODO proper serialization format
    with open(f"{device}_stage1.pickle", 'wb') as f:
        pickle.dump(db, f)
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/tiled_fuzzer.py
|
tiled_fuzzer.py
|
from itertools import chain
class Module:
    """A generated Verilog `top` module: ports, wires, continuous
    assignments and primitive instantiations, serializable as source."""

    def __init__(self):
        self.inputs = set()
        self.outputs = set()
        self.inouts = set()
        self.wires = set()
        self.assigns = []
        self.primitives = {}

    def __add__(self, other):
        """Merge two modules into a new one: port/wire sets are unioned,
        assigns concatenated, primitives merged (`other` wins on clash)."""
        merged = Module()
        merged.inputs = self.inputs | other.inputs
        merged.outputs = self.outputs | other.outputs
        merged.inouts = self.inouts | other.inouts
        merged.wires = self.wires | other.wires
        merged.assigns = self.assigns + other.assigns
        merged.primitives = {**self.primitives, **other.primitives}
        return merged

    def write(self, f):
        """Emit the module as Verilog text to the file-like object f."""
        f.write("module top(")
        f.write(", ".join(chain(self.inputs, self.outputs, self.inouts)))
        f.write(");\n")
        for port in self.inputs:
            f.write(f"input {port};\n")
        for port in self.outputs:
            f.write(f"output {port};\n")
        for port in self.inouts:
            f.write(f"inout {port};\n")
        for wire in self.wires:
            f.write(f"wire {wire};\n")
        # assignments are deduplicated by destination; the last one wins
        for dest, src in dict(self.assigns).items():
            f.write(f"assign {dest} = {src};\n")
        for prim in self.primitives.values():
            prim.write(f)
        f.write("endmodule\n")
class Primitive:
    """One primitive instantiation plus its defparam parameters."""

    def __init__(self, typ, inst):
        self.typ = typ        # primitive type, e.g. "DFF"
        self.inst = inst      # instance name
        self.portmap = {}     # port -> wire, or list of wires for a bus
        self.params = {}      # defparam name -> value

    def write(self, f):
        """Emit the instantiation followed by its defparam lines."""
        f.write(f"{self.typ} {self.inst} (")
        conns = []
        for port, wire in self.portmap.items():
            if isinstance(wire, list):
                # bus connection: concatenate the wires
                wire = "{" + ", ".join(wire) + "}"
            conns.append(f"\n.{port}({wire})")
        f.write(",".join(conns))
        f.write("\n);\n")
        for key, val in self.params.items():
            f.write(f"defparam {self.inst}.{key} = {val};\n")
class Constraints:
    """Physical constraints (cell placement, IO locations and attributes,
    clock assignments), serializable in the vendor .cst format."""

    def __init__(self):
        self.cells = {}    # instance -> (row, col, side, lut)
        self.ports = {}    # port -> pin location
        self.attrs = {}    # port -> {attribute: value}
        self.clocks = {}   # net -> clock constraint

    def __add__(self, other):
        """Merge two constraint sets; `other` wins on key clashes."""
        combined = Constraints()
        combined.cells = {**self.cells, **other.cells}
        combined.ports = {**self.ports, **other.ports}
        combined.attrs = {**self.attrs, **other.attrs}
        combined.clocks = {**self.clocks, **other.clocks}
        return combined

    def write(self, f):
        """Emit the constraints in .cst syntax to file-like object f."""
        for name, (row, col, side, lut) in self.cells.items():
            f.write(f"INS_LOC \"{name}\" R{row}C{col}[{side}][{lut}];\n")
        for name, loc in self.ports.items():
            f.write(f"IO_LOC \"{name}\" {loc};\n")
        for name, attrmap in self.attrs.items():
            f.write(f"IO_PORT \"{name}\" ")
            for attr, attr_value in attrmap.items():
                f.write(f"{attr}={attr_value} ")
            f.write(";\n")
        for name, loc in self.clocks.items():
            f.write(f"CLOCK_LOC \"{name}\" {loc};\n")
class DeviceConfig:
    """Key/value device options rendered as ` -name value` CLI text."""

    def __init__(self, settings):
        self.settings = settings

    @property
    def text(self):
        """All options concatenated in insertion order."""
        return "".join(f" -{name} {val}" for name, val in self.settings.items())
class PnrOptions:
    """Key/value PnR tool options rendered as ` -name value` CLI text."""

    def __init__(self, options):
        self.options = options

    @property
    def text(self):
        """All options concatenated in insertion order."""
        return "".join(f" -{name} {val}" for name, val in self.options.items())
class Pnr:
def __init__(self):
self.cst = None
self.netlist = None
self.cfg = None
self.device = None
self.partnumber = None
self.opt = None
def write(self, f):
template = """
add_file -type cst {cst}
add_file -type netlist {netlist}
set_device {device_desc}
set_option {opt}
run pnr
"""
device_desc = self.partnumber
if self.device in ['GW1N-9', 'GW1N-4', 'GW1N-9C', 'GW2A-18', 'GW2A-18C']:
device_desc = f'-name {self.device} {device_desc}'
f.write(template.format(
cst=self.cst,
netlist=self.netlist,
device=self.device,
device_desc=device_desc,
opt=self.opt.text + self.cfg.text))
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/codegen.py
|
codegen.py
|
from math import ceil
import numpy as np
from crcmod.predefined import mkPredefinedCrcFun
crc16arc = mkPredefinedCrcFun('crc-16')
def chunks(l, n):
    """Yield successive n-sized chunks from l."""
    return (l[pos:pos + n] for pos in range(0, len(l), n))
def bytearr(frame):
    "array of all bytes of the frame"
    bits = frame.strip()
    return bytearray(int(bits[pos:pos + 8], base=2) for pos in range(0, len(bits), 8))
def bitarr(frame, pad):
    "Array of *content* bits"
    # drop `pad` leading padding bits and the 64 trailing CRC/stop bits
    payload = frame.strip()[pad:-64]
    return [int(ch, base=2) for ch in payload]
def read_bitstream(fname):
    """Parse a vendor .fs text bitstream file.

    Returns (bitmap, hdr, ftr): bitmap is a numpy array of the frame
    content bits mirrored left-right, hdr/ftr are the raw command lines
    (as bytearrays) seen before and after the frame data.  Each frame's
    CRC16 is verified against the running checksum.
    """
    bitmap = []
    hdr = []
    ftr = []
    is_hdr = True
    crcdat = bytearray()
    preamble = 3   # first three lines are the preamble, excluded from CRC
    frames = 0     # number of data frames still expected
    with open(fname) as inp:
        for line in inp:
            if line.startswith("//"): continue
            ba = bytearr(line)
            if not frames:
                # command line: header before the frames, footer after
                if is_hdr:
                    hdr.append(ba)
                else:
                    ftr.append(ba)
                if not preamble and ba[0] != 0xd2: # SPI address
                    crcdat.extend(ba)
                if not preamble and ba[0] == 0x3b: # frame count
                    frames = int.from_bytes(ba[2:], 'big')
                    is_hdr = False
                if not preamble and ba[0] == 0x06: # device ID
                    # device ID selects per-family bit padding of each frame
                    if ba == b'\x06\x00\x00\x00\x11\x00\x58\x1b':
                        padding = 4
                    elif ba == b'\x06\x00\x00\x00\x11\x00H\x1b':
                        padding = 4
                    elif ba == b'\x06\x00\x00\x00\x09\x00\x28\x1b':
                        padding = 0
                    elif ba == b'\x06\x00\x00\x00\x01\x008\x1b':
                        padding = 0
                    elif ba == b'\x06\x00\x00\x00\x01\x00h\x1b':
                        padding = 0
                    elif ba == b'\x06\x00\x00\x00\x03\x00\x18\x1b':
                        padding = 0
                    elif ba == b'\x06\x00\x00\x00\x01\x00\x98\x1b':
                        padding = 0
                    elif ba == b'\x06\x00\x00\x00\x00\x00\x08\x1b':
                        padding = 0
                    else:
                        raise ValueError("Unsupported device", ba)
                preamble = max(0, preamble-1)
                continue
            # frame line: last 8 bytes carry CRC16 + stop bits
            crcdat.extend(ba[:-8])
            crc1 = (ba[-7] << 8) + ba[-8]
            crc2 = crc16arc(crcdat)
            assert crc1 == crc2, f"Not equal {crc1} {crc2}"
            # the 6 stop bytes seed the next frame's checksum
            crcdat = ba[-6:]
            bitmap.append(bitarr(line, padding))
            frames = max(0, frames-1)
    return np.fliplr(np.array(bitmap)), hdr, ftr
def compressLine(line, key8Z, key4Z, key2Z):
    """Compress one packed bitstream row by replacing runs of zero bytes
    (8, then 4, then 2 consecutive zeros) with the given key bytes.

    `line` is indexable in 8-byte windows (e.g. a numpy uint8 row);
    returns a list of byte values.
    """
    out = []
    for off in range(0, len(line), 8):
        window = line[off:off + 8].tobytes()
        window = window.replace(8 * b'\x00', bytes([key8Z]))
        window = window.replace(4 * b'\x00', bytes([key4Z]))
        out += window.replace(2 * b'\x00', bytes([key2Z]))
    return out
def write_bitstream(fname, bs, hdr, ftr, compress):
    """Serialize a bit matrix back into a vendor .fs text bitstream.

    bs       -- numpy bit matrix (orientation as returned by read_bitstream)
    hdr, ftr -- raw header/footer command lines to re-emit
    compress -- apply the vendor zero-run byte compression when true
                (also rewrites header line 5 with the chosen key bytes)
    """
    bs = np.fliplr(bs)
    if compress:
        # compressed rows are padded up to a multiple of 64 bits
        padlen = (ceil(bs.shape[1] / 64) * 64) - bs.shape[1]
    else:
        padlen = bs.shape[1] % 8
    pad = np.ones((bs.shape[0], padlen), dtype=np.uint8)
    bs = np.hstack([pad, bs])
    assert bs.shape[1] % 8 == 0
    bs=np.packbits(bs, axis=1)
    if compress:
        # search for smallest values not used in the bitstream
        lst, _ = np.histogram(bs, bins=[i for i in range(256)])
        [key8Z, key4Z, key2Z] = [i for i,val in enumerate(lst) if val==0][0:3]
        # update line 0x51 with keys
        hdr51 = int.from_bytes(hdr[5], 'big') & ~0xffffff
        hdr51 = hdr51 | (key8Z << 16) | (key4Z << 8) | (key2Z)
        hdr[5] = bytearray.fromhex(f"{hdr51:016x}")
    crcdat = bytearray()
    preamble = 3   # first three header lines are excluded from the CRC
    with open(fname, 'w') as f:
        for ba in hdr:
            if not preamble and ba[0] != 0xd2: # SPI address
                crcdat.extend(ba)
            preamble = max(0, preamble-1)
            f.write(''.join(f"{b:08b}" for b in ba))
            f.write('\n')
        for ba in bs:
            if compress:
                ba = compressLine(ba, key8Z, key4Z, key2Z)
            f.write(''.join(f"{b:08b}" for b in ba))
            crcdat.extend(ba)
            crc = crc16arc(crcdat)
            # next frame's CRC is seeded with the 6 stop bytes
            crcdat = bytearray(b'\xff'*6)
            # CRC is emitted little-endian, followed by 48 stop bits
            f.write(f"{crc&0xff:08b}{crc>>8:08b}")
            f.write('1'*48)
            f.write('\n')
        for ba in ftr:
            preamble = max(0, preamble-1)
            f.write(''.join(f"{b:08b}" for b in ba))
            f.write('\n')
def display(fname, data):
    """Render a bit matrix as a 1-bit PIL image.

    Saves to `fname` when given (falsy fname skips saving); always
    returns the Image object for interactive display.
    """
    from PIL import Image
    im = Image.frombytes(
        mode='1',
        size=data.shape[::-1],   # PIL wants (width, height)
        data=np.packbits(data, axis=1))
    if fname:
        im.save(fname)
    return im
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/bslib.py
|
bslib.py
|
# IOB attribute name -> vendor logicinfo id (entries marked * are the
# user-settable IO attributes).
iob_attrids = {
    'IO_TYPE': 0,
    'SLEWRATE': 1, # *
    'PULLMODE': 2, # *
    'DRIVE': 3, # *
    'OPENDRAIN': 4, # *
    'HYSTERESIS': 5, # *
    'CLAMP': 6, # *
    'DIFFRESISTOR': 7, # *
    'SINGLERESISTOR': 8, # *
    'VREF': 9, # *
    'VCCIO': 10,
    'DIFFDRIVE': 11,
    'I3C_MODE': 12,
    'MIPI_INPUT': 13,
    'MIPI_OUTPUT': 14,
    'DRIVE_LEVEL': 15,
    'LVDS_OUT': 16, # *
    'LVDS_VCCIO': 17,
    'DDR_DYNTERM': 18,
    'IO_BANK': 19, # *
    'PERSISTENT': 20, # *
    'TO': 21,
    'ODMUX': 22,
    'ODMUX_1': 23,
    'PADDI': 24,
    'PG_MUX': 25,
    'DATA_MUX': 26,
    'TRI_MUX': 27,
    'TRIMUX_PADDT': 28,
    'IOBUF_PADDI': 29,
    'USED': 30, # *
    'IOBUF_OVERDRIVE': 31,
    'IOBUF_UNDERDRIVE': 32,
    'IOBUF_LVDS25_VCCIO': 33,
    'IN12_MODE': 34,
    'OD': 35,
    'LPRX_A1': 36,
    'LPRX_A2': 37,
    'MIPI': 38,
    'LVDS_SEL': 39,
    'VLDS_ON': 40,
    'IOBUF_MIPI_LP': 41,
    'IOBUF_ODT_RESISTOR': 42,
    'IOBUF_CIB_CONTROL': 43,
    'IOBUF_INR_MODE': 44,
    'IOBUF_STDBY_LVDS_MODE': 45,
    'IOBUF_IODUTY': 46,
    'IOBUF_ODT_DYNTERM': 47,
    'MIPI_IBUF_DRIVE': 48,
    'MIPI_IBUF_DRIVE_LEVEL': 49
}
# IOB attribute value -> vendor logicinfo id (IO standards, voltages,
# drive strengths, pull modes etc.).
iob_attrvals = {
    'UNKNOWN': 0, # possible a dummy value for line 0 in logicinfo?
    # standard
    'MIPI': 1,
    'BLVDS25E': 2,
    'BLVDS25': 3,
    'BLVDS_E': 4,
    'HSTL': 5,
    'HSTL_D': 6,
    'HSTL15_I': 7,
    'HSTL15D_I': 8,
    'HSTL18_I': 9,
    'HSTL18_II': 10,
    'HSTL18D_I': 11,
    'HSTL18D_II': 12,
    'SSTL': 13,
    'SSTL_D': 14,
    'SSTL15': 15,
    'SSTL15D': 16,
    'SSTL18_I': 17,
    'SSTL18_II': 18,
    'SSTL18D_I': 19,
    'SSTL18D_II': 20,
    'SSTL25_I': 21,
    'SSTL25_II': 22,
    'SSTL25D_I': 23,
    'SSTL25D_II': 24,
    'SSTL33_I': 25,
    'SSTL33_II': 26,
    'SSTL33D_I': 27,
    'SSTL33D_II': 28,
    'LVCMOS12': 29,
    'LVCMOS15': 30,
    'LVCMOS18': 31,
    'LVCMOS25': 32,
    'LVCMOS33': 33,
    'LVCMOS_D': 34,
    'LVCMOS12D': 35,
    'LVCMOS15D': 36,
    'LVCMOS18D': 37,
    'LVCMOS25D': 38,
    'LVCMOS33D': 39,
    'LVDS': 40,
    'LVDS_E': 41,
    'LVDS25': 42,
    'LVDS25E': 43,
    'LVPECL33': 44,
    'LVPECL33E': 45,
    'LVTTL33': 46,
    'MLVDS25': 47,
    'MLVDS_E': 48,
    'MLVDS25E': 49,
    'RSDS25E': 50,
    'PCI33': 51,
    'RSDS': 52,
    'RSDS25': 53,
    'RSDS_E': 54,
    'MINILVDS': 55,
    'PPLVDS': 56,
    # vref
    'VREF1_DRIVER': 57,
    'VREF2_DRIVER': 58,
    #
    'LVCMOS33OD25': 59,
    'LVCMOS33OD18': 60,
    'LVCMOS33OD15': 61,
    'LVCMOS25OD18': 62,
    'LVCMOS25OD15': 63,
    'LVCMOS18OD15': 64,
    'LVCMOS15OD12': 65,
    'LVCMOS25UD33': 66,
    'LVCMOS18UD25': 67,
    'LVCMOS18UD33': 68,
    'LVCMOS15UD18': 69,
    'LVCMOS15UD25': 70,
    'LVCMOS15UD33': 71,
    'LVCMOS12UD15': 72,
    'LVCMOS12UD18': 73,
    'LVCMOS12UD25': 74,
    'LVCMOS12UD33': 75,
    'VREF1_LOAD': 76,
    'VREF2_LOAD': 77,
    #
    'ENABLE': 78,
    'TRIMUX': 79,
    'PADDI': 80,
    'PGBUF': 81,
    '0': 82,
    '1': 83,
    'SIG': 84,
    'INV': 85,
    'TO': 86,
    # voltage
    '1.2': 87,
    '1.25': 88,
    '1.5': 89,
    '1.8': 90,
    '2.0': 91,
    '2.5': 92,
    '3.3': 93,
    '3.5': 94,
    # mA
    '2': 95,
    '4': 96,
    '6': 97,
    '8': 98,
    '12': 99,
    '16': 100,
    '20': 101,
    '24': 102,
    # XXX ?
    '80': 103,
    '100': 104,
    '120': 105,
    #
    'NA': 106,
    'ON': 107,
    'OFF': 108,
    # XXX
    'PCI': 109,
    # histeresis
    'HIGH': 110,
    'H2L': 111,
    'L2H': 112,
    # pullmode
    'DOWN': 113,
    'KEEPER': 114,
    'NONE': 115,
    'UP': 116,
    # slew
    'FAST': 117,
    'SLOW': 118,
    # ?IO_BANK?
    'I45': 119,
    'I50': 120,
    'I55': 121,
    'TSREG': 122,
    'TMDDR': 123,
    'OD1': 124,
    'OD2': 125,
    'OD3': 126,
    'UD1': 127,
    'UD3': 128,
    # resistor?
    'INTERNAL': 129,
    'SINGLE': 130,
    'DIFF': 131,
    #
    'IN12': 132,
    'UD2': 133,
    'LVPECL_E': 134,
    #
    '68': 135,
    '3': 136,
    '5': 137,
    '7': 138,
    '9': 139,
    '10': 140,
    '11': 141,
    '4.5': 142,
    'MIPI_IBUF': 143,
    '1.35': 144,
    '5.5': 145,
    '6.5': 146,
    '10.5': 147,
    '13.5': 148,
    '14': 149,
    # more standard
    'TMDS33': 150,
    'LPDDR': 151,
    'HSUL12': 152,
    'HSUL12D': 153,
    'HSTL12_I': 154,
    'HSTL15_II': 155,
    'HSTL15D_II': 156,
    'SSTL12': 157,
    'SSTL135': 158,
    'SSTL135D': 159,
    'LVCMOS10': 160,
    'LVCMOS33OD12': 161,
    'LVCMOS25OD12': 162,
    'LVCMOS18OD12': 163,
}
# ADC
# ADC attribute name/value -> vendor logicinfo id tables.
adc_attrids = {
    'EN': 0,
    'VCCX': 1,
    'IOVREF': 2,
    'VREF': 3,
    'USED_FLAG': 4,
}
adc_attrvals = {
    'UNKNOWN': 0,
    'ENABLE': 1,
    '3.3': 2,
    '2.80': 3,
    '2.55': 4,
    '2.39': 5,
    '2.23': 6,
    '1.81': 7,
    '1.65': 8,
    '2.5': 9,
    '2.12': 10,
    '1.94': 11,
    '1.69': 12,
    '1.37': 13,
    '1.25': 14,
    '1.8': 15,
    '1.53': 16,
    '1.39': 17,
    '1.30': 18,
    '1.21': 19,
    '0.99': 20,
    '0.9': 21,
    'ON': 22
}
# BSRAM
# Block SRAM attribute name/value -> vendor logicinfo id tables.
bsram_attrids = {
    'CEMUX_CEA': 0,
    'CEMUX_CEB': 1,
    'CLKMUX_CLKA': 2,
    'CLKMUX_CLKB': 3,
    'CSA2': 4,
    'CSA_0': 5,
    'CSA_1': 6,
    'CSA_2': 7,
    'CSB2': 8,
    'CSB_0': 9,
    'CSB_1': 10,
    'CSB_2': 11,
    'DBLWA': 12,
    'DBLWB': 13,
    'GSR': 14,
    'MODE': 15,
    'OUTREG_ASYNC': 16,
    'OUTREG_CEA': 17,
    'OUTREG_CEB': 18,
    'PORTB_IBEH': 19,
    'REGSET_RSTA': 20,
    'REGSET_RSTB': 21,
    'REGSET_WEB': 22,
    'SYNC': 23,
    'WEMUX_WEA': 24,
    'WEMUX_WEB': 25,
    'DPA_DATA_WIDTH': 26,
    'DPB_DATA_WIDTH': 27,
    'DPA_BEHB': 28,
    'DPA_BELB': 29,
    'DPA_MODE': 30,
    'DPA_REGMODE': 31,
    'DPB_BEHB': 32,
    'DPB_BELB': 33,
    'DPB_MODE': 34,
    'DPB_REGMODE': 35,
    'SDPA_DATA_WIDTH': 36,
    'SDPB_DATA_WIDTH': 37,
    'SDPA_BEHB': 38,
    'SDPA_BELB': 39,
    'SDPA_MODE': 40,
    'SDPA_REGMODE': 41,
    'SDPB_BEHB': 42,
    'SDPB_BELB': 43,
    'SDPB_MODE': 44,
    'SDPB_REGMODE': 45,
    'SPA_DATA_WIDTH': 46,
    'SPB_DATA_WIDTH': 47,
    'SPA_BEHB': 48,
    'SPA_BELB': 49,
    'SPB_BEHB': 50,
    'SPB_BELB': 51,
    'SPA_MODE': 52,
    'SPA_REG_MODE': 53,
    'SPB_MODE': 54,
    'SPB_REG_MODE': 55,
    'ROMA_DATA_WIDTH': 56,
    'ROMB_DATA_WIDTH': 57,
    'ROM_DATA_WIDTH': 58,
    'ROM_PORTA_BEHB': 59,
    'ROM_PORTA_BELB': 60,
    'ROM_PORTA_REGMODE':61,
    'ROM_PORTB_REGMODE':62,
    'PORTB_BELB': 63,
    'PORTA_MODE': 64,
    'PORTB_MODE': 65,
    'PORTB_BEHB': 66
}
bsram_attrvals = {
    'UNKNOWN': 0,
    'INV': 1,
    'ENABLE': 2,
    'SET': 3,
    'X36': 4,
    '1': 5,
    '2': 6,
    '4': 7,
    '9': 8,
    '16': 9,
    'RBW': 10,
    'WT': 11,
    'OUTREG': 12,
    'DISABLE': 13,
    'RESET': 14
}
# slice
# Logic-slice (CLS) attribute name/value -> vendor logicinfo id tables.
cls_attrids = {
    'MODE': 0,
    'REGMODE': 1,
    'SRMODE': 2,
    'GSR': 3,
    'LSRONMUX': 4,
    'CEMUX_1': 5,
    'CEMUX_CE': 6,
    'CLKMUX_1': 7,
    'CLKMUX_CLK': 8,
    'LSR_MUX_1': 9,
    'LSR_MUX_LSR': 10,
    'REG0_SD': 11,
    'REG1_SD': 12,
    'REG0_REGSET': 13,
    'REG1_REGSET': 14
}
cls_attrvals = {
    'UNKNOWN': 0,
    '0': 1,
    '1': 2,
    'SIG': 3,
    'INV': 4,
    'ENGSR': 5,
    'DISGSR': 6,
    'LSRMUX': 7,
    'LUT': 8,
    'LOGIC': 9,
    'ALU': 10,
    'SSRAM': 11,
    'FF': 12,
    'LATCH': 13,
    'ASYNC': 14,
    'LSR_OVER_CE': 15,
    'SET': 16,
    'RESET': 17
}
# DLL
# Delay-locked-loop attribute name/value -> vendor logicinfo id tables.
dll_attrids = {
    'CLKSEL': 0,
    'CODESCAL': 1,
    'CODESCALEN': 2,
    'DIVSEL': 3,
    'FORCE': 4,
    'GSR': 5,
    'ROSC': 6,
    'ROUNDOFF': 7,
    'RSTPOL': 8,
    'CLKMUX_SYSCLK': 9
}
dll_attrvals = {
    'UNKNOWN': 0,
    'HECLK0': 1,
    'HECLK1': 2,
    'HECLK2': 3,
    'HECLK3': 4,
    'SYSCLK': 5,
    'POS_22': 6,
    'POS_33': 7,
    'POS_44': 8,
    'NEG_11': 9,
    'NEG_22': 10,
    'NEG_33': 11,
    'NEG_44': 12,
    'ENABLE': 13,
    'FAST': 14,
    'DISABLE': 15,
    'NOINV': 16,
    'POS_11': 17,
    'INV': 18
}
# PLL
# PLL attribute name/value -> vendor logicinfo id tables.
pll_attrids = {
    'BYPCK': 0,
    'BYPCKDIV': 1,
    'BYPCKPS': 2,
    'CLKOUTDIV3': 3,
    'CLKOUTDIV3SEL': 4,
    'CLKOUTDIV': 5,
    'CLKOUTDIVSEL': 6,
    'CLKOUTPS': 7,
    'CRIPPLE': 8,
    'DUTY': 9,
    'DUTYSEL': 10,
    'DPSEL': 11,
    'FBSEL': 12,
    'FDIV': 13,
    'FDIVSEL': 14,
    'FDLYPWD': 15,
    'FLDCOUNT': 16,
    'FLOCK': 17,
    'FLTOP': 18,
    'GMCGAIN': 19,
    'GMCMODE': 20,
    'GMCOUT': 21,
    'GMCVREF': 22,
    'ICPSEL': 23,
    'IDIV': 24,
    'IDIVSEL': 25,
    'INSEL': 26,
    'IRSTEN': 27,
    'KVCO': 28,
    'LPR': 29,
    'ODIV': 30,
    'ODIVSEL': 31,
    'OPDLY': 32,
    'OSDLY': 33,
    'PASEL': 34,
    'PDN': 35,
    'PHASE': 36,
    'PLOCK': 37,
    'PSDLY': 38,
    'PWDEN': 39,
    'RSTEN': 40,
    'RSTLF': 41,
    'SDIV': 42,
    'SELIN': 43,
    'SFTDLY': 44,
    'SRSTEN': 45,
    'CLKMUX_CLKIN2': 46,
    'CLKMUX_CLKIN1': 47,
    'CLKMUX_CLKFB0': 48,
    'PLLVCC0': 49,
    'PLLVCC0_BYPASS': 50,
    'PLLVCC0_TRIM0': 51,
    'PLLVCC0_TRIM1': 52,
    'PLLVCC1': 53,
    'PLLVCC1_BYPASS': 54,
    'PLLVCC1_TRIM0': 55,
    'PLLVCC1_TRIM1': 56,
    'VCOBIAS_EN_D': 57,
    'VCOBIAS_EN_U': 58,
    'DIVA': 59,
    'DIVB': 60,
    'DIVC': 61,
    'DIVD': 62,
    'DPAEN': 63,
    'DUTY_TRIM_A': 64,
    'DUTY_TRIM_B': 65,
    'ICPDYN_SEL': 66,
    'LPR_SEL': 67,
    'INTFB': 68,
    'MON': 69,
    'CKA': 70,
    'CKB': 71,
    'CKC': 72,
    'CKD': 73,
    'CKA_OUT': 74,
    'CKB_OUT': 75,
    'CKC_OUT': 76,
    'CKD_OUT': 77,
    'CKA_IN': 78,
    'CKB_IN': 79,
    'CKC_IN': 80,
    'CKD_IN': 81,
    'PSA_COARSE': 82,
    'PSB_COARSE': 83,
    'PSC_COARSE': 84,
    'PSD_COARSE': 85,
    'PSA_FINE': 86,
    'PSB_FINE': 87,
    'PSC_FINE': 88,
    'PSD_FINE': 89,
    'DTA_SEL': 90,
    'DTB_SEL': 91,
    'PSA_SEL': 92,
    'PSB_SEL': 93,
    'PSC_SEL': 94,
    'PSD_SEL': 95,
    'DIVA_SEL': 96,
    'DIVB_SEL': 97,
    'DIVC_SEL': 98,
    'DIVD_SEL': 99,
    'DTMS_ENA': 100,
    'DTMS_ENB': 101,
    'DTMS_ENC': 102,
    'DTMS_END': 103,
    'VCCREG_TRIM0': 104,
    'VCCREG_TRIM1': 105,
    'PLLREG0': 106,
}
pll_attrvals = {
    'UNKNOWN': 0,
    'BYPASS': 1,
    'DISABLE': 2,
    'ENABLE': 3,
    'CLKOUTPS': 4,
    'C1': 5,
    'C2': 6,
    'C3': 7,
    'DYN': 8,
    'PWD': 9,
    'CLKFB0': 10,
    'CLKFB1': 11,
    'CLKFB2': 12,
    'CLKFB3': 13,
    'CLKFB4': 14,
    'CLKFN0': 15,
    'FORCE0': 16,
    'FORCE1': 17,
    'CLKIN0': 18,
    'CLKIN1': 19,
    'CLKIN2': 20,
    'CLKIN3': 21,
    'CLKIN4': 22,
    'R1': 23,
    'R2': 24,
    'R3': 25,
    'R4': 26,
    'R5': 27,
    'R6': 28,
    'R7': 29,
    'RESET': 30,
    'INV': 31,
    '0': 32,
    'P0': 33,
    'P50': 34,
    'P100': 35,
    'P200': 36,
    'M0': 37,
    'M50': 38,
    'M100': 39,
    'M200': 40,
    'CKB': 41,
    'CKC': 42,
    'CKD': 43,
    'VSO': 44,
    'CASCADE': 45,
    'ICLK': 46,
    'FCLK': 47,
    'CLKOUT': 48
}
#OSCillator
# On-chip oscillator attribute name/value -> vendor logicinfo id tables.
osc_attrids = {
    'MCLKCIB': 0,
    'NORMAL': 1,
    'POWER_SAVE': 2,
    'USERPOWER_SAVE': 3,
    'MCLKCIB_EN': 4,
    'TRIM': 5,
    'OSCREG': 6, # I guess it is REGULATOR_EN
    'MCK2PLL': 7,
    'USED_FLAG': 10
}
osc_attrvals = {
    'UNKNOWN': 0,
    'ENABLE': 1,
    'ON': 2
}
# config
# Device configuration attribute name/value -> vendor logicinfo id tables.
cfg_attrids = {
    'DONE_AS_GPIO': 0,
    'GSR': 2,
    'JTAG_AS_GPIO': 6,
    'READY_AS_GPIO': 7,
    'MSPI_AS_GPIO': 8,
    'RECONFIG_AS_GPIO': 9,
    'SSPI_AS_GPIO': 10,
    'I2C_AS_GPIO': 20,
    'JTAG_EN': 21,
    'POR': 24, # power on reset
}
cfg_attrvals = {
    'UNKNOWN': 0,
    'YES': 1,
    'ACTIVE_LOW': 2,
    'F0': 3,
    'F1': 4,
    'F2': 5,
    'F3': 6,
    'USED': 7,
    'UNUSED': 8,
    'FALSE': 9
}
# global set/reset
# GSR attribute name/value -> vendor logicinfo id tables.
gsr_attrids = {
    'GSRMODE': 0,
    'SYNCMODE': 1,
}
gsr_attrvals = {
    'UNKNOWN': 0,
    'ACTIVE_LOW': 1,
    'SYNC': 2,
}
# iologic
# IOLOGIC attribute name/value -> vendor logicinfo id tables.
iologic_attrids = {
    'INMODE': 0,
    'OUTMODE': 1,
    'SRMODE': 2,
    'CLKIDDRMUX': 3,
    'DELMUX': 4,
    'GSR': 5,
    'TSHX': 6,
    'MARGINTEST': 7,
    'CEMUX_CE': 8,
    'CEIMUX_1': 9,
    'CEOMUX_1': 10,
    'CLKMUX_CLK': 11,
    'CLKIMUX_1': 12,
    'CLKIMUX_CLK': 13,
    'CLKOMUX_1': 14,
    'CLKOMUX_CLK': 15,
    'CLKIDDRMUX_CLKIDDR': 16,
    'CLKODDRMUX_CLKODDR': 17,
    'CLKODDRMUX_CLKOMUX': 18,
    'LSRMUX_LSR': 19,
    'LSRIMUX_0': 20,
    'LSROMUX_0': 21,
    'TSMUX_1': 22,
    'TSMUX_TS': 23,
    'FF_INREGMODE': 24,
    'IREG_INREGMODE': 25,
    'IREG_REGSET': 26,
    'OREG_OUTREGMODE': 27,
    'OREG_REGSET': 28,
    'TREG_INREGMODE': 29,
    'TREG_OUTREGMODE': 30,
    'TREG_REGSET': 31,
    'DELAY_DEL0': 32,
    'DELAY_DEL1': 33,
    'DELAY_DEL2': 34,
    'DELAY_DEL3': 35,
    'DELAY_DEL4': 36,
    'DELAY_DEL5': 37,
    'DELAY_DEL6': 38,
    'IMON': 39,
    'IMON_CENTSEL': 40,
    'IMON_SDR': 41,
    'ISIDEL': 42,
    'IMARG': 43,
    'UPDATE': 44,
    'INDEL': 45,
    'OUTDEL': 46,
    'FIFO': 47,
    'SGMII': 48,
    'ISI': 49,
    'CEIOMUX_CE': 50,
    'ECLKMUX_CLK': 51,
    'CLKODDRMUX_ECLK': 52,
    'CLKODDRMUX_WRCLK': 53,
    'CLKIDDRMUX_ECLK': 54,
    'CLKODDRMUX_WRCLKCLKODDR': 55,
    'CLKIMUX': 56,
    'CLKOMUX': 57,
    'OUTCLK': 58,
    'OUTSEL0': 59,
    'OUTSEL1': 60,
    'DYNAMICCIBCONTROL': 61,
    'IODELAY_CIB': 62,
    'DELAYCHAIN': 63,
    'DLYMUX_MUX0': 64,
    'DLYMUX_MUX1': 65,
    'INDEL_0': 66,
    'INDEL_1': 67,
    'IMON_CENTSEL_0': 68,
    'IMON_CENTSEL_1': 69,
}
iologic_attrvals = {
    'UNKNOWN': 0,
    '0': 1,
    '1': 2,
    'INV': 3,
    'SIG': 4,
    'IDDR_ODDR': 5,
    'IDDR_OREG': 6,
    'IREG_ODDR': 7,
    'IREG_OREG': 8,
    'IDDRX1': 9,
    'IDDRX2': 10,
    'IDDRX4': 11,
    'IDDRX5': 12,
    'ODDRX1': 13,
    'ODDRX2': 14,
    'ODDRX4': 15,
    'ODDRX5': 16,
    'ODDRX7': 17,
    'ODDRXN': 18,
    'MIDDRX1': 19,
    'MIDDRX2': 20,
    'MIDDRX4': 21,
    'MODDRX1': 22,
    'MODDRX2': 23,
    'MODDRX4': 24,
    'MODDRX21': 25,
    'MODDRX22': 26,
    'MODDRXN': 27,
    'MOSHX20': 28,
    'MOSHX22': 29,
    'MOSHX4': 30,
    'MTSHX21': 31,
    'MTSHX22': 32,
    'MTSHX4': 33,
    'MTXHX21': 34,
    'MTXHX1': 35,
    'DDRENABLE': 36,
    'CDRCLK': 37,
    'ECLK0': 38,
    'ECLK1': 39,
    'NEXTCLK': 40,
    'DQSW': 41,
    'DQSW270': 42,
    'DLY': 43,
    'DQR90': 44,
    'ENGSR': 45,
    'DISGSR': 46,
    'EDGE': 47,
    'FIFO': 48,
    'OREG': 49,
    'VIDEORX': 50,
    'ASYNC': 51,
    'LSR_OVER_CE': 52,
    'FF': 53,
    'LATCH': 54,
    'SET': 55,
    'RESET': 56,
    'MTSH1': 57,
    'MTSH2': 58,
    'MTSH4': 59,
    'TREG': 60,
    'ENABLE': 61,
    'SAME': 62,
    '25PS': 63,
    '50PS': 64,
    '100PS': 65,
    'ONE': 66,
    'IDDRX8': 67,
    'ODDRX8': 68,
    'DIRECTIONB': 69,
    'MOVEB': 70,
    'MONDISLIVEA0': 71,
    'MONDISLIVEA1': 72,
}
# num -> attr name
# Reverse lookup tables for decoding fuse values back to attribute names.
iologic_num2val = {v: k for k, v in iologic_attrvals.items()}
iob_num2val = {v: k for k, v in iob_attrvals.items()}
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/attrids.py
|
attrids.py
|
import sys
import os
import re
import pickle
import gzip
import itertools
import math
import numpy as np
import json
import argparse
import importlib.resources
from collections import namedtuple
from contextlib import closing
from apycula import codegen
from apycula import chipdb
from apycula.chipdb import add_attr_val, get_shortval_fuses, get_longval_fuses, get_bank_fuses
from apycula import attrids
from apycula import bslib
from apycula.wirenames import wirenames, wirenumbers
# Module-level packer state; presumably filled in by the command-line
# driver before the functions below are used -- TODO confirm against main().
device = ""             # target device family name
pnr = None              # loaded PnR netlist/result
is_himbaechel = False   # True when the netlist comes from the himbaechel arch
# Sometimes it is convenient to know where a port is connected to enable
# special fuses for VCC/VSS cases.
# This is not the optimal place for it - resources for routing are taken anyway
# and it should be done in nextpnr (as well as at yosys level to identify
# inverters since we can invert inputs without LUT in many cases), but for now
# let it be here to work out the mechanisms.
# Do not use for IOBs - their wires may be disconnected by IOLOGIC
_vcc_net = []
_gnd_net = []

def is_gnd_net(wire):
    """True when `wire` has been recorded as part of the global GND net."""
    return wire in _gnd_net

def is_vcc_net(wire):
    """True when `wire` has been recorded as part of the global VCC net."""
    return wire in _vcc_net

def is_connected(wire, connections):
    """True when `wire` has at least one entry in the connection map."""
    return bool(connections[wire])
### IOB
def iob_is_gnd_net(flags, wire):
    """True when the IOB flag set records `wire` as driven by GND."""
    return flags.get('NET_' + wire, False) == 'GND'

def iob_is_vcc_net(flags, wire):
    """True when the IOB flag set records `wire` as driven by VCC."""
    return flags.get('NET_' + wire, False) == 'VCC'

def iob_is_connected(flags, wire):
    """True when any net is recorded for `wire` in the IOB flag set."""
    return ('NET_' + wire) in flags
_verilog_name = re.compile(r"^[A-Za-z_0-9][A-Za-z_0-9$]*$")
def sanitize_name(name):
    """Strip synthesis suffixes (_LC, _DFFLC, $iob) from a cell name and
    return a legal Verilog identifier.

    Names that are not plain identifiers are emitted as Verilog escaped
    identifiers: a backslash prefix and a mandatory trailing space.
    """
    retname = name
    if name[-3:] == '_LC':
        retname = name[:-3]
    elif name[-6:] == '_DFFLC':
        retname = name[:-6]
    elif name[-4:] == '$iob':
        retname = name[:-4]
    if _verilog_name.fullmatch(retname):
        return retname
    # Explicit "\\" escape: the original f"\{...}" relied on the invalid
    # escape sequence '\{' (SyntaxWarning on modern Python); the emitted
    # text -- backslash, name, trailing space -- is unchanged.
    return f"\\{retname} "
def extra_pll_bels(cell, row, col, num, cellname):
    """Yield the auxiliary RPLLB bel tuples for a placed rPLL cell.

    Yields tuples shaped like get_bels() output; the number of extra
    cells and their horizontal direction depend on the module-level
    `device`.
    """
    # rPLL can occupy several cells, add them depending on the chip
    offx = 1
    if device in {'GW1N-9C', 'GW1N-9', 'GW2A-18', 'GW2A-18C'}:
        # these chips use three extra cells; right-half PLLs grow leftwards
        if int(col) > 28:
            offx = -1
        for off in [1, 2, 3]:
            yield ('RPLLB', int(row), int(col) + offx * off, num,
                   cell['parameters'], cell['attributes'], sanitize_name(cellname) + f'B{off}', cell)
    elif device in {'GW1N-1', 'GW1NZ-1', 'GW1N-4'}:
        # small chips need a single extra cell to the right
        for off in [1]:
            yield ('RPLLB', int(row), int(col) + offx * off, num,
                   cell['parameters'], cell['attributes'], sanitize_name(cellname) + f'B{off}', cell)
def get_bels(data):
    """Yield (type, row, col, num, params, attrs, name, cell) for every
    placed bel in the nextpnr JSON netlist `data`.

    Coordinates are 1-based row/col; differential IO buffers are yielded
    last so their iostd can follow the other pins in the bank.
    """
    later = []
    # bel-name pattern depends on which nextpnr arch produced the netlist
    if is_himbaechel:
        belre = re.compile(r"X(\d+)Y(\d+)/(?:GSR|LUT|DFF|IOB|MUX|ALU|ODDR|OSC[ZFHWO]?|BUFS|RAM16SDP4|RAM16SDP2|RAM16SDP1|PLL|IOLOGIC)(\w*)")
    else:
        belre = re.compile(r"R(\d+)C(\d+)_(?:GSR|SLICE|IOB|MUX2_LUT5|MUX2_LUT6|MUX2_LUT7|MUX2_LUT8|ODDR|OSC[ZFHWO]?|BUFS|RAMW|rPLL|PLLVR|IOLOGIC)(\w*)")
    for cellname, cell in data['modules']['top']['cells'].items():
        # skip helper cells and anything nextpnr did not actually place
        if cell['type'].startswith('DUMMY_') or cell['type'] in {'OSER16', 'IDES16'} or 'NEXTPNR_BEL' not in cell['attributes']:
            continue
        bel = cell['attributes']['NEXTPNR_BEL']
        if bel in {"VCC", "GND"}: continue
        if is_himbaechel and bel[-4:] in {'/GND', '/VCC'}:
            continue
        bels = belre.match(bel)
        if not bels:
            raise Exception(f"Unknown bel:{bel}")
        row, col, num = bels.groups()
        if is_himbaechel:
            # himbaechel uses 0-based X/Y; convert to 1-based row/col (swapped)
            col_ = col
            col = str(int(row) + 1)
            row = str(int(col_) + 1)
        # The differential buffer is pushed to the end of the queue for processing
        # because it does not have an independent iostd, but adjusts to the normal pins
        # in the bank, if any are found
        if 'DIFF' in cell['attributes']:
            later.append((cellname, cell, row, col, num))
            continue
        cell_type = cell['type']
        if cell_type == 'rPLL':
            # an rPLL spans extra cells; emit the companion RPLLB bels too
            cell_type = 'RPLLA'
            yield from extra_pll_bels(cell, row, col, num, cellname)
        yield (cell_type, int(row), int(col), num,
               cell['parameters'], cell['attributes'], sanitize_name(cellname), cell)
    # diff iobs
    for cellname, cell, row, col, num in later:
        yield (cell['type'], int(row), int(col), num,
               cell['parameters'], cell['attributes'], sanitize_name(cellname), cell)
_pip_bels = []
def get_pips(data):
    """Yield enabled pips recovered from each net's ROUTING attribute.

    Legacy arch yields 1-based (row, col, src, dest); in himbaechel mode
    the wire pair is emitted swapped (destination first) — route() indexes
    the fuse tables accordingly (TODO confirm the naming convention).
    Himbaechel pass-through-LUT pips (XD* source) are converted into
    implicit LUT4 bels appended to _pip_bels instead of being yielded.
    """
    if is_himbaechel:
        pipre = re.compile(r"X(\d+)Y(\d+)/([\w_]+)/([\w_]+)")
    else:
        pipre = re.compile(r"R(\d+)C(\d+)_([^_]+)_([^_]+)")
    for net in data['modules']['top']['netnames'].values():
        routing = net['attributes']['ROUTING']
        # ROUTING is ';'-separated (wire, pip, wire) triples; every third
        # element starting at index 1 is a pip name
        pips = routing.split(';')[1::3]
        for pip in pips:
            res = pipre.fullmatch(pip) # ignore alias
            if res:
                row, col, src, dest = res.groups()
                if is_himbaechel:
                    # XD - input of the DFF
                    if src.startswith('XD'):
                        if dest.startswith('F'):
                            continue
                        # pass-though LUT
                        num = dest[1]
                        # INIT patterns forward exactly one input (A..D) to the output
                        init = {'A': '1010101010101010', 'B': '1100110011001100',
                                'C': '1111000011110000', 'D': '1111111100000000'}[dest[0]]
                        _pip_bels.append(("LUT4", int(col) + 1, int(row) + 1, num, {"INIT": init}, {}, f'$PACKER_PASS_LUT_{len(_pip_bels)}', None))
                        continue
                    yield int(col) + 1, int(row) + 1, dest, src
                else:
                    yield int(row), int(col), src, dest
            elif pip and "DUMMY" not in pip:
                print("Invalid pip:", pip)
def infovaluemap(infovalue, start=2):
    """Index rows by their first *start* elements.

    Returns a dict mapping tuple(row[:start]) -> row[start:] for every
    row in *infovalue*; later duplicate keys overwrite earlier ones.
    """
    mapping = {}
    for row in infovalue:
        mapping[tuple(row[:start])] = row[start:]
    return mapping
# Permitted frequencies for chips, all values in MHz
# { device : (max_in, max_out, min_out, max_vco, min_vco) }
_permitted_freqs = {
        "GW1N-1":   (400, 450, 3.125,  900, 400),
        "GW1NZ-1":  (400, 400, 3.125,  800, 400),
        "GW1N-4":   (400, 500, 3.125, 1000, 400),
        "GW1NS-4":  (400, 600, 4.6875, 1200, 600),
        "GW1N-9":   (400, 500, 3.125, 1000, 400),
        "GW1N-9C":  (400, 600, 3.125, 1200, 400),
        "GW1NS-2":  (400, 500, 3.125, 1200, 400),
        "GW2A-18":  (400, 600, 3.125, 1200, 400), # XXX check it
        "GW2A-18C": (400, 600, 3.125, 1200, 400), # XXX check it
        }
# input params are calculated as described in GOWIN doc (UG286-1.7E_Gowin Clock User Guide)
# fref = fclkin / idiv
# fvco = (odiv * fdiv * fclkin) / idiv
#
# returns (fclkin_idx, icp, r_idx)
# fclkin_idx - input frequency range index
# icp - charge current
# r_idx - resistor value index
# There are not many resistors so the whole frequency range is divided into
# 30MHz intervals and the number of this interval is one of the fuse sets. But
# the resistor itself is not directly dependent on the input frequency.
# Table of (frequency threshold MHz, resistance Ohm) pairs:
# _freq_R[0] is used for the GW1N* families, _freq_R[1] for GW2A-18/GW2A-18C
# (see the device check in calc_pll_pump below).
_freq_R = [[(2.6, 65100.0), (3.87, 43800.0), (7.53, 22250.0), (14.35, 11800.0), (28.51, 5940.0), (57.01, 2970.0), (114.41, 1480), (206.34, 820.0)], [(2.4, 69410.0), (3.53, 47150.0), (6.82, 24430.0), (12.93, 12880.0), (25.7, 6480.0), (51.4, 3240.0), (102.81, 1620), (187.13, 890.0)]]
def calc_pll_pump(fref, fvco):
    """Select the PLL charge-pump fuse values for given frequencies.

    fref, fvco - PFD and VCO frequencies in MHz.
    Returns (fclkin_idx, icp, r_idx): encoded input-range index, charge
    current, and loop-filter resistor index.

    NOTE(review): if r_vals is empty or no resistor satisfies
    Ic <= 0.28mA, `icp`/`r_idx` are never bound and the return raises
    UnboundLocalError — presumably the frequency-range checks in
    set_pll_attrs make that unreachable; confirm.
    """
    # 30MHz input ranges; the top ranges are merged as per the fuse tables
    fclkin_idx = int((fref - 1) // 30)
    if (fclkin_idx == 13 and fref <= 395) or (fclkin_idx == 14 and fref <= 430) or (fclkin_idx == 15 and fref <= 465) or fclkin_idx == 16:
        fclkin_idx = fclkin_idx - 1
    # resistor table differs between the GW1N* and GW2A families
    if device not in {'GW2A-18', 'GW2A-18C'}:
        freq_Ri = _freq_R[0]
    else:
        freq_Ri = _freq_R[1]
    # candidate resistors whose threshold lies below fref, largest first
    r_vals = [(fr[1], len(freq_Ri) - 1 - idx) for idx, fr in enumerate(freq_Ri) if fr[0] < fref]
    r_vals.reverse()
    # Find the resistor that provides the minimum current through the capacitor
    # Kvco polynomial fits per family (empirical, see UG286-derived fuzzing)
    if device not in {'GW2A-18', 'GW2A-18C'}:
        K0 = (497.5 - math.sqrt(247506.25 - (2675.4 - fvco) * 78.46)) / 39.23
        K1 = 4.8714 * K0 * K0 + 6.5257 * K0 + 142.67
    else:
        K0 = (-28.938 + math.sqrt(837.407844 - (385.07 - fvco) * 0.9892)) / 0.4846
        K1 = 0.1942 * K0 * K0 - 13.173 * K0 + 518.86
    Kvco = 1000000.0 * K1
    Ndiv = fvco / fref
    C1 = 6.69244e-11
    for R1, r_idx in r_vals:
        Ic = (1.8769 / (R1 * R1 * Kvco * C1)) * 4.0 * Ndiv
        if Ic <= 0.00028:
            # round to the nearest 10uA step
            icp = int(Ic * 100000.0 + 0.5) * 10
            break
    return ((fclkin_idx + 1) * 16, icp, r_idx)
# add the default pll attributes according to the documentation
_default_pll_inattrs = {
'FCLKIN' : '100.00',
'IDIV_SEL' : '0',
'DYN_IDIV_SEL' : 'false',
'FBDIV_SEL' : '00000000000000000000000000000000',
'DYN_FBDIV_SEL' : 'false',
'ODIV_SEL' : '00000000000000000000000000001000',
'DYN_ODIV_SEL' : 'false',
'PSDA_SEL' : '0000 ', # XXX extra space for compatibility, but it will work with or without it in the future
'DUTYDA_SEL' : '1000 ', # ^^^
'DYN_DA_EN' : 'false',
'CLKOUT_FT_DIR' : '1',
'CLKOUT_DLY_STEP': '00000000000000000000000000000000',
'CLKOUTP_FT_DIR': '1',
'CLKOUTP_DLY_STEP': '00000000000000000000000000000000',
'DYN_SDIV_SEL' : '00000000000000000000000000000010',
'CLKFB_SEL' : 'internal',
'CLKOUTD_SRC' : 'CLKOUT',
'CLKOUTD3_SRC' : 'CLKOUT',
'CLKOUT_BYPASS' : 'false',
'CLKOUTP_BYPASS': 'false',
'CLKOUTD_BYPASS': 'false',
'DEVICE' : 'GW1N-1'
}
_default_pll_internal_attrs = {
'INSEL': 'CLKIN1',
'FBSEL': 'CLKFB3',
'PLOCK': 'ENABLE',
'FLOCK': 'ENABLE',
'FLTOP': 'ENABLE',
'GMCMODE': 15,
'CLKOUTDIV3': 'ENABLE',
'CLKOUTDIV': 'ENABLE',
'CLKOUTPS': 'ENABLE',
'PDN': 'ENABLE',
'PASEL': 0,
'IRSTEN': 'DISABLE',
'SRSTEN': 'DISABLE',
'PWDEN': 'ENABLE',
'RSTEN': 'ENABLE',
'FLDCOUNT': 16,
'GMCGAIN': 0,
'LPR': 'R4',
'ICPSEL': 50,
}
def add_pll_default_attrs(attrs):
pll_inattrs = attrs.copy()
for k, v in _default_pll_inattrs.items():
if k in pll_inattrs:
continue
pll_inattrs[k] = v
return pll_inattrs
# typ - PLL type (RPLL, etc)
def set_pll_attrs(db, typ, idx, attrs):
    """Translate user PLL parameters into the fuse attribute set.

    db    - chip database
    typ   - PLL primitive type; only 'RPLL' and 'PLLVR' are supported
    idx   - PLLVR instance index (selects the PLLVCC0/PLLVCC1 fuse);
            ignored for RPLL
    attrs - user parameters; missing ones get the documented defaults

    Returns the set consumed by get_shortval_fuses(..., 'PLL').
    """
    pll_inattrs = add_pll_default_attrs(attrs)
    pll_attrs = _default_pll_internal_attrs.copy()
    if typ not in {'RPLL', 'PLLVR'}:
        raise Exception(f"PLL type {typ} is not supported for now")
    if typ == 'PLLVR':
        pll_attrs[['PLLVCC0', 'PLLVCC1'][idx]] = 'ENABLE'
    # parse attrs
    # The branches below are independent of each other, so the iteration
    # order of pll_inattrs does not matter; idiv/fbdiv/odiv/fclkin are
    # always bound because add_pll_default_attrs guarantees the keys exist.
    for attr, val in pll_inattrs.items():
        if attr in pll_attrs:
            pll_attrs[attr] = val
        if attr == 'CLKOUTD_SRC':
            if val == 'CLKOUTP':
                pll_attrs['CLKOUTDIVSEL'] = 'CLKOUTPS'
            continue
        if attr == 'CLKOUTD3_SRC':
            if val == 'CLKOUTP':
                pll_attrs['CLKOUTDIV3SEL'] = 'CLKOUTPS'
            continue
        if attr == 'DYN_IDIV_SEL':
            if val == 'true':
                pll_attrs['IDIVSEL'] = 'DYN'
            continue
        if attr == 'DYN_FBDIV_SEL':
            if val == 'true':
                pll_attrs['FDIVSEL'] = 'DYN'
            continue
        if attr == 'DYN_ODIV_SEL':
            if val == 'true':
                pll_attrs['ODIVSEL'] = 'DYN'
            continue
        if attr == 'CLKOUT_BYPASS':
            if val == 'true':
                pll_attrs['BYPCK'] = 'BYPASS'
            continue
        if attr == 'CLKOUTP_BYPASS':
            if val == 'true':
                pll_attrs['BYPCKPS'] = 'BYPASS'
            continue
        if attr == 'CLKOUTD_BYPASS':
            if val == 'true':
                pll_attrs['BYPCKDIV'] = 'BYPASS'
            continue
        if attr == 'IDIV_SEL':
            # fuse value is divisor - 1
            idiv = 1 + int(val, 2)
            pll_attrs['IDIV'] = idiv
            continue
        if attr == 'FBDIV_SEL':
            fbdiv = 1 + int(val, 2)
            pll_attrs['FDIV'] = fbdiv
            continue
        if attr == 'DYN_SDIV_SEL':
            pll_attrs['SDIV'] = int(val, 2)
            continue
        if attr == 'ODIV_SEL':
            odiv = int(val, 2)
            pll_attrs['ODIV'] = odiv
            continue
        if attr == 'DYN_DA_EN':
            if val == 'true':
                pll_attrs['DPSEL'] = 'DYN'
                pll_attrs['DUTY'] = 0
                pll_attrs['PHASE'] = 0
                pll_attrs['PASEL'] = 'DISABLE'
                # steps in 50ps
                tmp_val = int(pll_inattrs['CLKOUT_DLY_STEP'], 2) * 50
                pll_attrs['OPDLY'] = tmp_val
                # XXX here is unclear according to the documentation only three
                # values are allowed: 0, 1 and 2, but there are 4 fuses (0, 50,
                # 75, 100). Find out what to do with 75
                tmp_val = int(pll_inattrs['CLKOUTP_DLY_STEP'], 2) * 50
                pll_attrs['OSDLY'] = tmp_val
            else:
                pll_attrs['OSDLY'] = 'DISABLE'
                pll_attrs['OPDLY'] = 'DISABLE'
                phase_val = int(pll_inattrs['PSDA_SEL'].strip(), 2)
                pll_attrs['PHASE'] = phase_val
                duty_val = int(pll_inattrs['DUTYDA_SEL'].strip(), 2)
                # XXX there are fuses for 15 variants (excluding 0) so for now
                # we will implement all of them, including those prohibited by
                # documentation 1 and 15
                # duty is encoded relative to the phase, modulo 16
                if (phase_val + duty_val) < 16:
                    duty_val = phase_val + duty_val
                else:
                    duty_val = phase_val + duty_val - 16
                pll_attrs['DUTY'] = duty_val
            continue
        if attr == 'FCLKIN':
            fclkin = float(val)
            if fclkin < 3 or fclkin > _permitted_freqs[device][0]:
                print(f"The {fclkin}MHz frequency is outside the permissible range of 3-{_permitted_freqs[device][0]}MHz.")
                fclkin = 100.0
            continue
    # static vs dynamic
    if pll_inattrs['DYN_IDIV_SEL'] == 'false' and pll_inattrs['DYN_FBDIV_SEL'] == 'false' and pll_inattrs['DYN_ODIV_SEL'] == 'false':
        # static. We can immediately check the compatibility of the divisors
        clkout = fclkin * fbdiv / idiv
        if clkout <= _permitted_freqs[device][2] or clkout > _permitted_freqs[device][1]:
            raise Exception(f"CLKOUT = FCLKIN*(FBDIV_SEL+1)/(IDIV_SEL+1) = {clkout}MHz not in range {_permitted_freqs[device][2]} - {_permitted_freqs[device][1]}MHz")
        pfd = fclkin / idiv
        if pfd < 3.0 or pfd > _permitted_freqs[device][0]:
            raise Exception(f"PFD = FCLKIN/(IDIV_SEL+1) = {pfd}MHz not in range 3.0 - {_permitted_freqs[device][0]}MHz")
        fvco = odiv * fclkin * fbdiv / idiv
        if fvco < _permitted_freqs[device][4] or fvco > _permitted_freqs[device][3]:
            raise Exception(f"VCO = FCLKIN*(FBDIV_SEL+1)*ODIV_SEL/(IDIV_SEL+1) = {fvco}MHz not in range {_permitted_freqs[device][4]} - {_permitted_freqs[device][3]}MHz")
    # pump
    fref = fclkin / idiv
    fvco = (odiv * fbdiv * fclkin) / idiv
    fclkin_idx, icp, r_idx = calc_pll_pump(fref, fvco)
    pll_attrs['FLDCOUNT'] = fclkin_idx
    pll_attrs['ICPSEL'] = int(icp)
    pll_attrs['LPR'] = f"R{r_idx}"
    # encode: strings go through the attribute-value table, ints are raw
    fin_attrs = set()
    for attr, val in pll_attrs.items():
        if isinstance(val, str):
            val = attrids.pll_attrvals[val]
        add_attr_val(db, 'PLL', fin_attrs, attrids.pll_attrids[attr], val)
    return fin_attrs
def set_osc_attrs(db, typ, params):
    """Translate oscillator parameters into the fuse attribute set.

    typ    - oscillator primitive type (OSC/OSCZ/OSCF/OSCH/OSCW/OSCO)
    params - cell parameters; FREQ_DIV (binary string, must be even) and
             REGULATOR_EN are recognized, everything else is ignored.

    Returns the set consumed by get_shortval_fuses(..., 'OSC').
    """
    osc_attrs = dict()
    for param, val in params.items():
        if param == 'FREQ_DIV':
            fdiv = int(val, 2)
            if fdiv % 2 == 1:
                raise Exception(f"Divisor of {typ} must be even")
            osc_attrs['MCLKCIB'] = fdiv
            osc_attrs['MCLKCIB_EN'] = "ENABLE"
            osc_attrs['NORMAL'] = "ENABLE"
            # the plain OSC/OSCW primitives have no power-save control
            if typ not in {'OSC', 'OSCW'}:
                osc_attrs['USERPOWER_SAVE'] = 'ENABLE'
            continue
        if param == 'REGULATOR_EN':
            reg = int(val, 2)
            if reg == 1:
                osc_attrs['OSCREG'] = "ENABLE"
            continue
    # encode: strings go through the attribute-value table, ints are raw
    fin_attrs = set()
    for attr, val in osc_attrs.items():
        if isinstance(val, str):
            val = attrids.osc_attrvals[val]
        add_attr_val(db, 'OSC', fin_attrs, attrids.osc_attrids[attr], val)
    return fin_attrs
# Default user-level attributes per IOLOGIC primitive type; these are merged
# under the user's own attributes in set_iologic_attrs() before being
# rewritten to fuse-level names by iologic_mod_attrs().
_iologic_default_attrs = {
        'DUMMY': {},
        'IOLOGIC': {},
        'IOLOGIC_DUMMY': {},
        'ODDR':   { 'TXCLK_POL': '0'},
        'ODDRC':  { 'TXCLK_POL': '0'},
        'OSER4':  { 'GSREN': 'false', 'LSREN': 'true', 'TXCLK_POL': '0', 'HWL': 'false'},
        'OSER8':  { 'GSREN': 'false', 'LSREN': 'true', 'TXCLK_POL': '0', 'HWL': 'false'},
        'OSER10': { 'GSREN': 'false', 'LSREN': 'true'},
        'OSER16': { 'GSREN': 'false', 'LSREN': 'true', 'CLKOMUX': 'ENABLE'},
        'OVIDEO': { 'GSREN': 'false', 'LSREN': 'true'},
        'IDES4':  { 'GSREN': 'false', 'LSREN': 'true'},
        'IDES8':  { 'GSREN': 'false', 'LSREN': 'true'},
        'IDES10': { 'GSREN': 'false', 'LSREN': 'true'},
        'IVIDEO': { 'GSREN': 'false', 'LSREN': 'true'},
        'IDDR' :  {'CLKIMUX': 'ENABLE', 'LSRIMUX_0': '0', 'LSROMUX_0': '0'},
        'IDDRC' : {'CLKIMUX': 'ENABLE', 'LSRIMUX_0': '1', 'LSROMUX_0': '0'},
        'IDES16': { 'GSREN': 'false', 'LSREN': 'true', 'CLKIMUX': 'ENABLE'},
        }
def iologic_mod_attrs(attrs):
    """Rewrite user-level IOLOGIC attributes to fuse-level ones, in place.

    TXCLK_POL, HWL and GSREN are consumed and replaced by their fuse
    equivalents (TSHX, UPDATE, GSR); LSREN and Q0/Q1_INIT are dropped
    unhandled for now.
    """
    txclk_pol = attrs.pop('TXCLK_POL', None)
    if txclk_pol is not None:
        attrs['TSHX'] = 'SIG' if int(txclk_pol) == 0 else 'INV'
    if attrs.pop('HWL', None) == 'true':
        attrs['UPDATE'] = 'SAME'
    if attrs.pop('GSREN', None) == 'true':
        attrs['GSR'] = 'ENGSR'
    # XXX ignore for now
    for ignored in ('LSREN', 'Q0_INIT', 'Q1_INIT'):
        attrs.pop(ignored, None)
def set_iologic_attrs(db, attrs, param):
    """Build the fuse attribute set for one IOLOGIC bel.

    attrs - user cell parameters (OUTMODE/INMODE etc.); conditions below
            read the *original* values even after in_attrs is rewritten
    param - cell attributes; IOLOGIC_TYPE selects the defaults and
            IOLOGIC_FCLK (SPINE10..13) selects the ECLK mux input

    Returns the set consumed by get_shortval_fuses(..., 'IOLOGIC<num>').
    """
    in_attrs = _iologic_default_attrs[param['IOLOGIC_TYPE']].copy()
    in_attrs.update(attrs)
    iologic_mod_attrs(in_attrs)
    fin_attrs = set()
    # output path (OSER*/ODDR*)
    if 'OUTMODE' in attrs:
        if attrs['OUTMODE'] != 'ODDRX1':
            in_attrs['CLKODDRMUX_WRCLK'] = 'ECLK0'
        if attrs['OUTMODE'] != 'ODDRX1' or param['IOLOGIC_TYPE'] == 'ODDRC':
            in_attrs['LSROMUX_0'] = '1'
        else:
            in_attrs['LSROMUX_0'] = '0'
        # SPINE12/13 map to ECLK1, SPINE10/11 to ECLK0, otherwise unknown
        in_attrs['CLKODDRMUX_ECLK'] = 'UNKNOWN'
        if param['IOLOGIC_FCLK'] in {'SPINE12', 'SPINE13'}:
            in_attrs['CLKODDRMUX_ECLK'] = 'ECLK1'
        elif param['IOLOGIC_FCLK'] in {'SPINE10', 'SPINE11'}:
            in_attrs['CLKODDRMUX_ECLK'] = 'ECLK0'
        if attrs['OUTMODE'] == 'ODDRX8' or attrs['OUTMODE'] == 'DDRENABLE16':
            in_attrs['LSROMUX_0'] = '0'
        if attrs['OUTMODE'] == 'DDRENABLE16':
            in_attrs['OUTMODE'] = 'DDRENABLE'
            in_attrs['ISI'] = 'ENABLE'
        if attrs['OUTMODE'] == 'DDRENABLE':
            in_attrs['ISI'] = 'ENABLE'
        in_attrs['LSRIMUX_0'] = '0'
        in_attrs['CLKOMUX'] = 'ENABLE'
        #in_attrs['LSRMUX_LSR'] = 'INV'
    # input path (IDES*/IDDR*)
    if 'INMODE' in attrs:
        if param['IOLOGIC_TYPE'] not in {'IDDR', 'IDDRC'}:
            #in_attrs['CLKODDRMUX_WRCLK'] = 'ECLK0'
            in_attrs['CLKOMUX_1'] = '1'
        in_attrs['CLKODDRMUX_ECLK'] = 'UNKNOWN'
        if param['IOLOGIC_FCLK'] in {'SPINE12', 'SPINE13'}:
            in_attrs['CLKIDDRMUX_ECLK'] = 'ECLK1'
        elif param['IOLOGIC_FCLK'] in {'SPINE10', 'SPINE11'}:
            in_attrs['CLKIDDRMUX_ECLK'] = 'ECLK0'
        in_attrs['LSRIMUX_0'] = '1'
        if attrs['INMODE'] == 'IDDRX8' or attrs['INMODE'] == 'DDRENABLE16':
            in_attrs['LSROMUX_0'] = '0'
        if attrs['INMODE'] == 'DDRENABLE16':
            in_attrs['INMODE'] = 'DDRENABLE'
            in_attrs['ISI'] = 'ENABLE'
        if attrs['INMODE'] == 'DDRENABLE':
            in_attrs['ISI'] = 'ENABLE'
        in_attrs['LSROMUX_0'] = '0'
        in_attrs['CLKIMUX'] = 'ENABLE'
    for k, val in in_attrs.items():
        if k not in attrids.iologic_attrids:
            print(f'XXX IOLOGIC: add {k} key handle')
        else:
            add_attr_val(db, 'IOLOGIC', fin_attrs, attrids.iologic_attrids[k], attrids.iologic_attrvals[val])
    return fin_attrs
# Families of vendor IO-standard names that share one set of fuses;
# every member of a frozenset key maps to the canonical fuse-table name.
_iostd_alias = {
        frozenset({"BLVDS25E"}): "BLVDS_E",
        frozenset({"LVTTL33"}): "LVCMOS33",
        frozenset({"LVCMOS12D", "LVCMOS15D", "LVCMOS18D", "LVCMOS25D", "LVCMOS33D", }): "LVCMOS_D",
        frozenset({"HSTL15", "HSTL18_I", "HSTL18_II"}): "HSTL",
        frozenset({"SSTL15", "SSTL18_I", "SSTL18_II", "SSTL25_I", "SSTL25_II", "SSTL33_I", "SSTL33_II"}): "SSTL",
        frozenset({"MLVDS25E"}): "MLVDS_E",
        frozenset({"SSTL15D", "SSTL18D_I", "SSTL18D_II", "SSTL25D_I", "SSTL25D_II", "SSTL33D_I", "SSTL33D_II"}): "SSTL_D",
        frozenset({"HSTL15D", "HSTL18D_I", "HSTL18D_II"}): "HSTL_D",
        frozenset({"RSDS"}): "RSDS25",
        frozenset({"RSDS25E"}): "RSDS_E",
        }

def get_iostd_alias(iostd):
    """Return the canonical fuse-table name for *iostd*, or *iostd* itself
    when it has no alias."""
    for family, alias in _iostd_alias.items():
        if iostd in family:
            return alias
    return iostd
# For each bank, remember the Bels used, mark whether Outs were among them and the standard.
class BankDesc:
    # iostd           - IO standard shared by the bank (None until determined)
    # inputs_only     - presumably True while the bank holds only inputs; not
    #                   read in this part of the module -- confirm usage
    # bels_tiles      - collected (bel, tile) users of the bank
    # true_lvds_drive - DRIVE of a true-lvds pin, if any (unused here)
    def __init__(self, iostd, inputs_only, bels_tiles, true_lvds_drive):
        self.iostd = iostd
        self.inputs_only = inputs_only
        self.bels_tiles = bels_tiles
        self.true_lvds_drive = true_lvds_drive

# bank number -> BankDesc, filled lazily by place()
_banks = {}
# IO encode in two passes: the first collect the IO attributes and place them
# according to the banks, the second after processing actually forms the fuses.
class IOBelDesc:
    def __init__(self, row, col, idx, attrs, flags, connections):
        # 0-based grid position plus the A/B half index
        self.pos = (row, col, idx)
        self.attrs = attrs # standard attributes
        self.flags = flags # aux special flags
        self.connections = connections

# bank number -> {bel name -> IOBelDesc}, filled by place()'s first pass
_io_bels = {}
# IO standard assumed for each buffer mode when the user specifies none
_default_iostd = {
        'IBUF': 'LVCMOS18',  'OBUF': 'LVCMOS18', 'TBUF': 'LVCMOS18', 'IOBUF': 'LVCMOS18',
        'TLVDS_IBUF': 'LVDS25', 'TLVDS_OBUF': 'LVDS25', 'TLVDS_TBUF': 'LVDS25',
        'TLVDS_IOBUF': 'LVDS25',
        'ELVDS_IBUF': 'LVCMOS33D', 'ELVDS_OBUF': 'LVCMOS33D', 'ELVDS_TBUF': 'LVCMOS33D',
        'ELVDS_IOBUF': 'LVCMOS33D',
        }
# IO standard -> bank supply voltage (VCCIO) as a string
_vcc_ios = {'LVCMOS12': '1.2', 'LVCMOS15': '1.5', 'LVCMOS18': '1.8', 'LVCMOS25': '2.5',
            'LVCMOS33': '3.3', 'LVDS25': '2.5', 'LVCMOS33D': '3.3', 'LVCMOS_D': '3.3'}
# Fuse-level starting attributes per IO buffer mode; place()'s second pass
# copies these and then layers user and LVDS overrides on top.
_init_io_attrs = {
        'IBUF': {'PADDI': 'PADDI', 'HYSTERESIS': 'NONE', 'PULLMODE': 'UP', 'SLEWRATE': 'SLOW',
            'DRIVE': '0', 'CLAMP': 'OFF', 'OPENDRAIN': 'OFF', 'DIFFRESISTOR': 'OFF',
            'VREF': 'OFF', 'LVDS_OUT': 'OFF'},
        'OBUF': {'ODMUX_1': '1', 'PULLMODE': 'UP', 'SLEWRATE': 'FAST',
            'DRIVE': '8', 'HYSTERESIS': 'NONE', 'CLAMP': 'OFF', 'DIFFRESISTOR': 'OFF',
            'SINGLERESISTOR': 'OFF', 'VCCIO': '1.8', 'LVDS_OUT': 'OFF', 'DDR_DYNTERM': 'NA', 'TO': 'INV', 'OPENDRAIN': 'OFF'},
        'TBUF': {'ODMUX_1': 'UNKNOWN', 'PULLMODE': 'UP', 'SLEWRATE': 'FAST',
            'DRIVE': '8', 'HYSTERESIS': 'NONE', 'CLAMP': 'OFF', 'DIFFRESISTOR': 'OFF',
            'SINGLERESISTOR': 'OFF', 'VCCIO': '1.8', 'LVDS_OUT': 'OFF', 'DDR_DYNTERM': 'NA',
            'TO': 'INV', 'PERSISTENT': 'OFF', 'ODMUX': 'TRIMUX'},
        'IOBUF': {'ODMUX_1': 'UNKNOWN', 'PULLMODE': 'UP', 'SLEWRATE': 'FAST',
            'DRIVE': '8', 'HYSTERESIS': 'NONE', 'CLAMP': 'OFF', 'DIFFRESISTOR': 'OFF',
            'SINGLERESISTOR': 'OFF', 'VCCIO': '1.8', 'LVDS_OUT': 'OFF', 'DDR_DYNTERM': 'NA',
            'TO': 'INV', 'PERSISTENT': 'OFF', 'ODMUX': 'TRIMUX', 'PADDI': 'PADDI', 'OPENDRAIN': 'OFF'},
        }
# user attribute spelling -> fuse-table spelling
_refine_attrs = {'SLEW_RATE': 'SLEWRATE', 'PULL_MODE': 'PULLMODE', 'OPEN_DRAIN': 'OPENDRAIN'}

def refine_io_attrs(attr):
    """Map a user-facing IO attribute name to its fuse-table spelling;
    names without an alias pass through unchanged."""
    return _refine_attrs[attr] if attr in _refine_attrs else attr
def place_lut(db, tiledata, tile, parms, num):
    """Set the fuses of LUT *num* in *tile* from the INIT parameter.

    The INIT string is repeated up to 16 bits (MSB first); a fuse is set
    for every '0' bit of the truth table.
    """
    fuse_table = tiledata.bels[f'LUT{num}'].flags
    init_bits = str(parms['INIT'])
    init_bits = init_bits * (16 // len(init_bits))
    # INIT is written MSB first, so walk it back to front
    for bit_idx, bit in enumerate(reversed(init_bits)):
        if bit != '0':
            continue
        for frow, fcol in fuse_table[bit_idx]:
            tile[frow][fcol] = 1
def place_alu(db, tiledata, tile, parms, num):
    """Configure ALU *num*: clear the LUT fuse positions, then set the
    fuses of the requested ALU_MODE.

    ALU_MODE is looked up verbatim first; on a miss it is interpreted as
    a binary string and looked up by its decimal value.
    """
    lut_fuses = tiledata.bels[f'LUT{num}'].flags
    alu = tiledata.bels[f"ALU{num}"]
    mode = str(parms['ALU_MODE'])
    # the ALU shares the LUT bit positions, so zero them all first
    for positions in lut_fuses.values():
        for frow, fcol in positions:
            tile[frow][fcol] = 0
    try:
        bits = alu.modes[mode]
    except KeyError:
        bits = alu.modes[str(int(mode, 2))]
    for frow, fcol in bits:
        tile[frow][fcol] = 1
def place_dff(db, tiledata, tile, parms, num, mode):
    """Set the fuses of DFF *num* for the given flavor *mode* (DFF, DFFR,
    DFFNC, ...).

    The addressing int(num) % 2 / int(num) // 2 reflects that two
    registers share one CLS shortval table.
    """
    dff_attrs = set()
    add_attr_val(db, 'SLICE', dff_attrs, attrids.cls_attrids['REGMODE'], attrids.cls_attrvals['FF'])
    # REG0_REGSET and REG1_REGSET select set/reset or preset/clear options for each DFF individually
    if mode in {'DFFR', 'DFFC', 'DFFNR', 'DFFNC', 'DFF', 'DFFN'}:
        add_attr_val(db, 'SLICE', dff_attrs, attrids.cls_attrids[f'REG{int(num) % 2}_REGSET'], attrids.cls_attrvals['RESET'])
    else:
        add_attr_val(db, 'SLICE', dff_attrs, attrids.cls_attrids[f'REG{int(num) % 2}_REGSET'], attrids.cls_attrvals['SET'])
    # are set/reset/clear/preset port needed?
    if mode not in {'DFF', 'DFFN'}:
        add_attr_val(db, 'SLICE', dff_attrs, attrids.cls_attrids['LSRONMUX'], attrids.cls_attrvals['LSRMUX'])
    # invert clock?
    if mode in {'DFFN', 'DFFNR', 'DFFNC', 'DFFNP', 'DFFNS'}:
        add_attr_val(db, 'SLICE', dff_attrs, attrids.cls_attrids['CLKMUX_CLK'], attrids.cls_attrvals['INV'])
    else:
        add_attr_val(db, 'SLICE', dff_attrs, attrids.cls_attrids['CLKMUX_CLK'], attrids.cls_attrvals['SIG'])
    # async option?
    if mode in {'DFFNC', 'DFFNP', 'DFFC', 'DFFP'}:
        add_attr_val(db, 'SLICE', dff_attrs, attrids.cls_attrids['SRMODE'], attrids.cls_attrvals['ASYNC'])
    dffbits = get_shortval_fuses(db, tiledata.ttyp, dff_attrs, f'CLS{int(num) // 2}')
    #print(f'({row - 1}, {col - 1}) mode:{mode}, num{num}, attrs:{dff_attrs}, bits:{dffbits}')
    for brow, bcol in dffbits:
        tile[brow][bcol] = 1
def place_slice(db, tiledata, tile, parms, num):
    """Place one half-slice: LUT or ALU fuses plus the optional register."""
    if 'ALU_MODE' in parms:
        place_alu(db, tiledata, tile, parms, num)
    else:
        place_lut(db, tiledata, tile, parms, num)
    # registers exist only for cells 0..5
    if int(num) < 6 and int(parms['FF_USED'], 2):
        ff_mode = str(parms['FF_TYPE']).strip('E')
        place_dff(db, tiledata, tile, parms, num, ff_mode)
# the two half-bel suffix letters ('A'/'B'); presumably consumed elsewhere
# in the module -- not referenced in this part of the file
_sides = "AB"
def place(db, tilemap, bels, cst, args):
    """Place every bel: set its fuses in the corresponding tiles.

    db      - chip database
    tilemap - mutable map (row, col) -> tile bit matrix (0-based keys)
    bels    - iterable of tuples produced by get_bels()
    cst     - constraints object; physical pin names go into cst.ports
    args    - command-line options (allow_pinless_io, ...)

    IO bels are handled in two passes: the first collects per-bank
    attributes into _io_bels, the second encodes the fuses once each
    bank's common IO standard is known.
    """
    for typ, row, col, num, parms, attrs, cellname, cell in bels:
        tiledata = db.grid[row-1][col-1]
        tile = tilemap[(row-1, col-1)]
        # fold the four buffer primitives into one IOB type plus
        # OUTPUT/INPUT/ENABLE_USED flags
        if typ in {'IBUF', 'OBUF', 'TBUF', 'IOBUF'}:
            if typ == 'IBUF':
                parms['OUTPUT_USED'] = "0"
                parms['INPUT_USED'] = "1"
                parms['ENABLE_USED'] = "0"
            elif typ == 'TBUF':
                parms['OUTPUT_USED'] = "1"
                parms['INPUT_USED'] = "0"
                parms['ENABLE_USED'] = "1"
            elif typ == 'IOBUF':
                parms['OUTPUT_USED'] = "1"
                parms['INPUT_USED'] = "1"
                parms['ENABLE_USED'] = "1"
            else:
                parms['OUTPUT_USED'] = "1"
                parms['INPUT_USED'] = "0"
                parms['ENABLE_USED'] = "0"
            typ = 'IOB'
        # fold all ser/des/ddr primitives into one IOLOGIC type
        if is_himbaechel and typ in {'IOLOGIC', 'IOLOGIC_DUMMY', 'ODDR', 'ODDRC', 'OSER4', 'OSER8', 'OSER10', 'OVIDEO',
                                     'IDDR', 'IDDRC', 'IDES4', 'IDES8', 'IDES10', 'IVIDEO'}:
            if typ == 'IOLOGIC_DUMMY':
                # a dummy borrows the FCLK of its main cell
                attrs['IOLOGIC_FCLK'] = pnr['modules']['top']['cells'][attrs['MAIN_CELL']]['attributes']['IOLOGIC_FCLK']
            attrs['IOLOGIC_TYPE'] = typ
            if typ not in {'IDDR', 'IDDRC', 'ODDR', 'ODDRC'}:
                # We clearly distinguish between the HCLK wires and clock
                # spines at the nextpnr level by name, but in the fuse tables
                # they have the same number, this is possible because the clock
                # spines never go along the edges of the chip where the HCLK
                # wires are.
                recode_spines = {'UNKNOWN': 'UNKNOWN', 'HCLK_OUT0': 'SPINE10',
                                 'HCLK_OUT1': 'SPINE11', 'HCLK_OUT2': 'SPINE12',
                                 'HCLK_OUT3': 'SPINE13'}
                if attrs['IOLOGIC_FCLK'] in recode_spines:
                    attrs['IOLOGIC_FCLK'] = recode_spines[attrs['IOLOGIC_FCLK']]
                else:
                    attrs['IOLOGIC_FCLK'] = 'UNKNOWN'
            typ = 'IOLOGIC'
        if typ == "GSR":
            pass
        elif typ.startswith('MUX2_'):
            pass
        elif typ == "BUFS":
            # fuses must be reset in order to activate so remove them
            bits2zero = set()
            for fuses in [fuses for fuses in parms.keys() if fuses in {'L', 'R'}]:
                bits2zero.update(tiledata.bels[f'BUFS{num}'].flags[fuses])
            for r, c in bits2zero:
                tile[r][c] = 0
        elif typ in {'OSC', 'OSCZ', 'OSCF', 'OSCH', 'OSCW', 'OSCO'}:
            # XXX turn on (GW1NZ-1)
            if device == 'GW1NZ-1':
                # NOTE(review): magic fuse position in the corner tile -- verify
                en_tiledata = db.grid[db.rows - 1][db.cols - 1]
                en_tile = tilemap[(db.rows - 1, db.cols - 1)]
                en_tile[23][63] = 0
            # clear powersave fuses
            clear_attrs = set()
            add_attr_val(db, 'OSC', clear_attrs, attrids.osc_attrids['POWER_SAVE'], attrids.osc_attrvals['ENABLE'])
            bits = get_shortval_fuses(db, tiledata.ttyp, clear_attrs, 'OSC')
            for r, c in bits:
                tile[r][c] = 0
            osc_attrs = set_osc_attrs(db, typ, parms)
            bits = get_shortval_fuses(db, tiledata.ttyp, osc_attrs, 'OSC')
            for r, c in bits:
                tile[r][c] = 1
        elif typ == "SLICE":
            place_slice(db, tiledata, tile, parms, num)
        elif typ.startswith("DFF"):
            mode = typ.strip('E')
            place_dff(db, tiledata, tile, parms, num, mode)
        elif typ.startswith('LUT'):
            place_lut(db, tiledata, tile, parms, num)
        elif typ.startswith('ALU'):
            place_alu(db, tiledata, tile, parms, num)
        elif typ[:3] == "IOB":
            # derive the physical pin name IO<edge><index><half>
            edge = 'T'
            idx = col
            if row == db.rows:
                edge = 'B'
            elif col == 1:
                edge = 'L'
                idx = row
            elif col == db.cols:
                edge = 'R'
                idx = row
            bel_name = f"IO{edge}{idx}{num}"
            cst.ports[cellname] = bel_name
            iob = tiledata.bels[f'IOB{num}']
            if 'DIFF' in parms:
                # skip negative pin for lvds
                if parms['DIFF'] == 'N':
                    continue
                # valid pin?
                if not iob.is_diff:
                    raise ValueError(f"Cannot place {cellname} at {bel_name} - not a diff pin")
                if not iob.is_diff_p:
                    raise ValueError(f"Cannot place {cellname} at {bel_name} - not a P pin")
                mode = parms['DIFF_TYPE']
                if iob.is_true_lvds and mode[0] != 'T':
                    raise ValueError(f"Cannot place {cellname} at {bel_name} - it is a true lvds pin")
                if not iob.is_true_lvds and mode[0] == 'T':
                    raise ValueError(f"Cannot place {cellname} at {bel_name} - it is an emulated lvds pin")
            else:
                # derive the buffer mode back from the usage flags
                if int(parms["ENABLE_USED"], 2):
                    if int(parms["INPUT_USED"], 2):
                        mode = "IOBUF"
                    else:
                        mode = "TBUF"
                elif int(parms["INPUT_USED"], 2):
                    mode = "IBUF"
                elif int(parms["OUTPUT_USED"], 2):
                    mode = "OBUF"
                else:
                    raise ValueError("IOB has no in or output")
            pinless_io = False
            try:
                bank = chipdb.loc2bank(db, row - 1, col - 1)
                iostd = _banks.setdefault(bank, BankDesc(None, True, [], None)).iostd
            except KeyError:
                if not args.allow_pinless_io:
                    raise Exception(f"IO{edge}{idx}{num} is not allowed for a given package")
                pinless_io = True
                iostd = None
            flags = {'mode': mode}
            # NET_* parameters record constant-net connections of the ports
            flags.update({port: net for port, net in parms.items() if port.startswith('NET_')})
            if int(parms.get("IOLOGIC_IOB", "0")):
                flags['USED_BY_IOLOGIC'] = True
            # NOTE(review): on the pinless path 'bank' was never assigned, so
            # the next line raises NameError -- confirm the allow_pinless_io flow
            io_desc = _io_bels.setdefault(bank, {})[bel_name] = IOBelDesc(row - 1, col - 1, num, {}, flags, cell['connections'])
            # find io standard
            iostd = _default_iostd[mode]
            io_desc.attrs['IO_TYPE'] = iostd
            # cell attributes of the form <sep>NAME=VALUE carry IO attributes
            for flag in attrs.keys():
                flag_name_val = flag.split("=")
                if len(flag_name_val) < 2:
                    continue
                if flag[0] != chipdb.mode_attr_sep:
                    continue
                if flag_name_val[0] == chipdb.mode_attr_sep + "IO_TYPE":
                    iostd = _iostd_alias.get(flag_name_val[1], flag_name_val[1])
                else:
                    io_desc.attrs[flag_name_val[0][1:]] = flag_name_val[1]
            io_desc.attrs['IO_TYPE'] = iostd
            if pinless_io:
                # NOTE(review): this returns from place() entirely, aborting the
                # remaining bels and the second IO pass; 'continue' may have
                # been intended -- confirm
                return
        elif typ.startswith("RAM16SDP") or typ == "RAMW":
            ram_attrs = set()
            add_attr_val(db, 'SLICE', ram_attrs, attrids.cls_attrids['MODE'], attrids.cls_attrvals['SSRAM'])
            rambits = get_shortval_fuses(db, tiledata.ttyp, ram_attrs, 'CLS3')
            # In fact, the WRE signal is considered active when it is low, so
            # we include an inverter on the LSR2 line here to comply with the
            # documentation
            add_attr_val(db, 'SLICE', ram_attrs, attrids.cls_attrids['LSR_MUX_1'], attrids.cls_attrvals['0'])
            add_attr_val(db, 'SLICE', ram_attrs, attrids.cls_attrids['LSR_MUX_LSR'], attrids.cls_attrvals['INV'])
            rambits.update(get_shortval_fuses(db, tiledata.ttyp, ram_attrs, 'CLS2'))
            #print(f'({row - 1}, {col - 1}) attrs:{ram_attrs}, bits:{rambits}')
            for brow, bcol in rambits:
                tile[brow][bcol] = 1
        elif typ == 'IOLOGIC':
            #print(row, col, cellname)
            iologic_attrs = set_iologic_attrs(db, parms, attrs)
            bits = set()
            table_type = f'IOLOGIC{num}'
            bits = get_shortval_fuses(db, tiledata.ttyp, iologic_attrs, table_type)
            for r, c in bits:
                tile[r][c] = 1
        elif typ.startswith('RPLL'):
            pll_attrs = set_pll_attrs(db, 'RPLL', 0, parms)
            bits = set()
            if 'PLL' in db.shortval[tiledata.ttyp]:
                bits = get_shortval_fuses(db, tiledata.ttyp, pll_attrs, 'PLL')
            #print(typ, tiledata.ttyp, bits)
            for r, c in bits:
                tile[r][c] = 1
        elif typ == 'PLLVR':
            # NOTE(review): hard-coded column 28 selects PLLVCC0 vs PLLVCC1 -- confirm
            idx = 0
            if col != 28:
                idx = 1
            pll_attrs = set_pll_attrs(db, 'PLLVR', idx, parms)
            bits = get_shortval_fuses(db, tiledata.ttyp, pll_attrs, 'PLL')
            #print(typ, bits)
            for r, c in bits:
                tile[r][c] = 1
            # only for 4C, we know exactly where CFG is
            cfg_type = 51
            bits = get_shortval_fuses(db, cfg_type, pll_attrs, 'PLL')
            cfg_tile = tilemap[(0, 37)]
            for r, c in bits:
                cfg_tile[r][c] = 1
        else:
            print("unknown type", typ)
    # second IO pass
    for bank, ios in _io_bels.items():
        # check IO standard
        vccio = None
        iostd = None
        for iob in ios.values():
            # diff io can't be placed at simplified io
            if iob.pos[0] in db.simplio_rows:
                if iob.flags['mode'].startswith('ELVDS') or iob.flags['mode'].startswith('TLVDS'):
                    raise Exception(f"Differential IO cant be placed at special row {iob.pos[0]}")
            if iob.flags['mode'] in {'IBUF', 'IOBUF', 'TLVDS_IBUF', 'TLVDS_IOBUF', 'ELVDS_IBUF', 'ELVDS_IOBUF'}:
                iob.attrs['IO_TYPE'] = get_iostd_alias(iob.attrs['IO_TYPE'])
                if iob.attrs.get('SINGLERESISTOR', 'OFF') != 'OFF':
                    iob.attrs['DDR_DYNTERM'] = 'ON'
            # outputs define the bank supply; conflicting standards are fatal
            if iob.flags['mode'] in {'OBUF', 'IOBUF', 'TLVDS_IOBUF', 'ELVDS_IOBUF'}:
                if not vccio:
                    iostd = iob.attrs['IO_TYPE']
                    vccio = _vcc_ios[iostd]
                elif vccio != _vcc_ios[iob.attrs['IO_TYPE']] and not iostd.startswith('LVDS') and not iob.attrs['IO_TYPE'].startswith('LVDS'):
                    snd_type = iob.attrs['IO_TYPE']
                    fst = [name for name, iob in ios.items() if iob.attrs['IO_TYPE'] == iostd][0]
                    snd = [name for name, iob in ios.items() if iob.attrs['IO_TYPE'] == snd_type][0]
                    raise Exception(f"Different IO standard for bank {bank}: {fst} sets {iostd}, {snd} sets {iob.attrs['IO_TYPE']}.")
        if not vccio:
            # no outputs in the bank: fall back to the lowest supply
            iostd = 'LVCMOS12'
        in_bank_attrs = {}
        in_bank_attrs['VCCIO'] = _vcc_ios[iostd]
        # set io bits
        for name, iob in ios.items():
            row, col, idx = iob.pos
            tiledata = db.grid[row][col]
            mode_for_attrs = iob.flags['mode']
            lvds_attrs = {}
            if mode_for_attrs.startswith('TLVDS_') or mode_for_attrs.startswith('ELVDS_'):
                # strip the TLVDS_/ELVDS_ prefix to pick the base init attrs
                mode_for_attrs = mode_for_attrs[6:]
                lvds_attrs = {'HYSTERESIS': 'NA', 'PULLMODE': 'NONE', 'OPENDRAIN': 'OFF'}
            in_iob_attrs = _init_io_attrs[mode_for_attrs].copy()
            in_iob_attrs.update(lvds_attrs)
            # constant OEN connections lead to the use of special fuses
            if iob.flags['mode'] not in {'IBUF', 'TLVDS_IBUF', 'ELVDS_IBUF'}:
                if iob_is_connected(iob.flags, 'OEN'):
                    if iob_is_gnd_net(iob.flags, 'OEN'):
                        in_iob_attrs['TRIMUX_PADDT'] = 'SIG'
                    elif iob_is_vcc_net(iob.flags, 'OEN'):
                        in_iob_attrs['ODMUX_1'] = '0'
                    else:
                        in_iob_attrs['TRIMUX_PADDT'] = 'SIG'
                        in_iob_attrs['TO'] = 'SIG'
                else:
                    in_iob_attrs['ODMUX_1'] = '1'
            #
            for k, val in iob.attrs.items():
                k = refine_io_attrs(k)
                in_iob_attrs[k] = val
            in_iob_attrs['VCCIO'] = in_bank_attrs['VCCIO']
            #print(in_iob_attrs)
            # lvds
            if iob.flags['mode'] in {'TLVDS_OBUF', 'TLVDS_TBUF', 'TLVDS_IOBUF'}:
                in_iob_attrs.update({'LVDS_OUT': 'ON', 'ODMUX_1': 'UNKNOWN', 'ODMUX': 'TRIMUX',
                                     'SLEWRATE': 'FAST', 'DRIVE': '0', 'PERSISTENT': 'OFF'})
            elif iob.flags['mode'] in {'ELVDS_OBUF', 'ELVDS_TBUF', 'ELVDS_IOBUF'}:
                in_iob_attrs.update({'ODMUX_1': 'UNKNOWN', 'ODMUX': 'TRIMUX',
                                     'PERSISTENT': 'OFF'})
            in_iob_attrs['IO_TYPE'] = get_iostd_alias(in_iob_attrs['IO_TYPE'])
            if iob.flags['mode'] in {'TLVDS_IBUF', 'ELVDS_IBUF'}:
                in_iob_attrs['ODMUX_1'] = 'UNKNOWN'
                in_iob_attrs.pop('VCCIO', None)
            # XXX may be here do GW9 pins also
            if device == 'GW1N-1':
                if row == 5 and mode_for_attrs == 'OBUF':
                    in_iob_attrs['TO'] = 'UNKNOWN'
            if device not in {'GW1N-4', 'GW1NS-4'}:
                if mode[1:].startswith('LVDS') and in_iob_attrs['DRIVE'] != '0':
                    in_iob_attrs['DRIVE'] = 'UNKNOWN'
            # build the attributes of the complementary (B) half
            in_iob_b_attrs = {}
            if iob.flags['mode'] in {'TLVDS_OBUF', 'TLVDS_TBUF', 'TLVDS_IOBUF'}:
                in_iob_b_attrs = in_iob_attrs.copy()
            elif iob.flags['mode'] in {'TLVDS_IBUF', 'ELVDS_IBUF'}:
                in_iob_b_attrs = in_iob_attrs.copy()
                if iob.flags['mode'] in {'ELVDS_IBUF'}:
                    in_iob_attrs['PULLMODE'] = 'UP'
                    in_iob_b_attrs['PULLMODE'] = 'NONE'
                in_iob_b_attrs['IO_TYPE'] = in_iob_attrs.get('IO_TYPE', 'UNKNOWN')
                in_iob_b_attrs['DIFFRESISTOR'] = in_iob_attrs.get('DIFFRESISTOR', 'OFF')
            elif iob.flags['mode'] in {'ELVDS_OBUF', 'ELVDS_TBUF', 'ELVDS_IOBUF'}:
                if iob.flags['mode'] in {'ELVDS_IOBUF'}:
                    in_iob_attrs['PULLMODE'] = 'UP'
                    # NOTE(review): this write to in_iob_b_attrs is immediately
                    # overwritten by the copy() below -- confirm intent
                    in_iob_b_attrs['PULLMODE'] = 'UP'
                in_iob_b_attrs = in_iob_attrs.copy()
            for iob_idx, atr in [(idx, in_iob_attrs), ('B', in_iob_b_attrs)]:
                #print(name, iob.pos, atr)
                iob_attrs = set()
                for k, val in atr.items():
                    if k not in attrids.iob_attrids:
                        print(f'XXX IO: add {k} key handle')
                    elif k == 'OPENDRAIN' and val == 'OFF' and 'LVDS' not in iob.flags['mode'] and 'IBUF' not in iob.flags['mode']:
                        continue
                    else:
                        add_attr_val(db, 'IOB', iob_attrs, attrids.iob_attrids[k], attrids.iob_attrvals[val])
                    # VCCIO and disabled LVDS_OUT stay out of the bank attrs
                    if k in {'VCCIO'}:
                        continue
                    if k == 'LVDS_OUT' and val not in {'ENABLE', 'ON'}:
                        continue
                    in_bank_attrs[k] = val
                bits = get_longval_fuses(db, tiledata.ttyp, iob_attrs, f'IOB{iob_idx}')
                tile = tilemap[(row, col)]
                for row_, col_ in bits:
                    tile[row_][col_] = 1
        # bank bits
        brow, bcol = db.bank_tiles[bank]
        tiledata = db.grid[brow][bcol]
        bank_attrs = set()
        for k, val in in_bank_attrs.items():
            #print(k, val)
            if k not in attrids.iob_attrids:
                print(f'XXX BANK: add {k} key handle')
            else:
                add_attr_val(db, 'IOB', bank_attrs, attrids.iob_attrids[k], attrids.iob_attrvals[val])
        bits = get_bank_fuses(db, tiledata.ttyp, bank_attrs, 'BANK', int(bank))
        btile = tilemap[(brow, bcol)]
        for row, col in bits:
            btile[row][col] = 1
    #for k, v in _io_bels.items():
    #    for io, bl in v.items():
    #        print(k, io, vars(bl))
# The vertical columns of long wires can receive a signal from either the upper
# or the lower end of the column.
# The default source is the top end of the column, but if optimum routing has
# resulted in the bottom end of the column being used, the top end must be
# electrically disconnected by setting special fuses.
def secure_long_wires(db, tilemap, row, col, src, dest):
    """Disconnect the top driver of a long-wire column whose bottom end
    is in use (currently implemented for GW1N-1 only).

    row/col are the 1-based pip coordinates; src/dest the wire names.
    """
    if device in {"GW1N-1"}:
        # the column runs across the entire height of the chip from the first to the last row
        check_row = db.rows
        fuse_row = 0
        if row == check_row and dest in {'LT02', 'LT13'}:
            tiledata = db.grid[fuse_row][col - 1]
            if dest in tiledata.alonenode_6:
                tile = tilemap[(fuse_row, col - 1)]
                _, bits = tiledata.alonenode_6[dest]
                for row, col in bits:
                    tile[row][col] = 1
def route(db, tilemap, pips):
    """Set the fuses for every pip in *pips*.

    Each pip is a 1-based (row, col, src, dest) tuple as produced by
    get_pips().  Fuse positions are looked up in the tile's clock pips,
    the himbaechel HCLK pips, or the regular pips, in that order.
    Unknown pips are reported and skipped.

    Fix: the error path contained a leftover interactive `breakpoint()`
    which froze non-interactive runs on any unknown pip; it is removed
    and the pip is now just reported and skipped.
    """
    for row, col, src, dest in pips:
        tiledata = db.grid[row-1][col-1]
        tile = tilemap[(row-1, col-1)]
        try:
            if dest in tiledata.clock_pips:
                bits = tiledata.clock_pips[dest][src]
            elif is_himbaechel and (row - 1, col - 1) in db.hclk_pips and dest in db.hclk_pips[row - 1, col - 1]:
                bits = db.hclk_pips[row - 1, col - 1][dest][src]
            else:
                bits = tiledata.pips[dest][src]
        except KeyError:
            print(src, dest, "not found in tile", row, col)
            continue
        # use non-shadowing names for the fuse coordinates
        for brow, bcol in bits:
            tile[brow][bcol] = 1
def header_footer(db, bs, compress):
    """
    Generate fs header and footer
    Currently limited to checksum with
    CRC_check and security_bit_enable set

    The bitstream matrix *bs* is mirrored and packed into bytes, then
    summed as big-endian 16-bit words; the low 16 bits of that sum form
    the checksum stored in the footer.
    """
    packed = np.packbits(np.fliplr(bs))
    # configuration data checksum is computed on all
    # data in 16bit format
    words = np.array(packed)
    total = int(words[0::2].sum() * pow(2, 8) + words[1::2].sum())
    checksum = total & 0xffff
    if compress:
        # update line 0x10 with compress enable bit
        # rest (keys) is done in bslib.write_bitstream
        hdr10 = int.from_bytes(db.cmd_hdr[4], 'big') | (1 << 13)
        db.cmd_hdr[4] = bytearray.fromhex(f"{hdr10:016x}")
    # set the checksum
    db.cmd_ftr[1] = bytearray.fromhex(f"{0x0A << 56 | checksum:016x}")
def gsr(db, tilemap, args):
    """Enable the global set/reset (GSR) network.

    Builds the GSR ('GSRMODE' = ACTIVE_LOW) and CFG ('GSR' = USED) attribute
    sets, then burns the corresponding fuses into every tile whose type has a
    GSR or CFG shortval table.  Uses the module global ``device``.
    """
    gsr_attrs = set()
    for key, val in {'GSRMODE': 'ACTIVE_LOW'}.items():
        if key in attrids.gsr_attrids:
            add_attr_val(db, 'GSR', gsr_attrs, attrids.gsr_attrids[key], attrids.gsr_attrvals[val])
        else:
            print(f'XXX GSR: add {key} key handle')
    cfg_attrs = set()
    for key, val in {'GSR': 'USED'}.items():
        if key in attrids.cfg_attrids:
            add_attr_val(db, 'CFG', cfg_attrs, attrids.cfg_attrids[key], attrids.cfg_attrvals[val])
        else:
            print(f'XXX CFG GSR: add {key} key handle')
    # The configuration fuses are described in the ['shortval'][60] table, global
    # set/reset in the ['shortval'][20] table; these tile types carry them.
    gsr_type, cfg_type = {50, 83}, {50, 51}
    if device in {'GW2A-18', 'GW2A-18C'}:
        gsr_type, cfg_type = {1, 83}, {1, 51}
    for row, grid_row in enumerate(db.grid):
        for col, cell in enumerate(grid_row):
            fuse_bits = set()
            if cell.ttyp in gsr_type:
                fuse_bits = get_shortval_fuses(db, cell.ttyp, gsr_attrs, 'GSR')
            if cell.ttyp in cfg_type:
                fuse_bits.update(get_shortval_fuses(db, cell.ttyp, cfg_attrs, 'CFG'))
            if not fuse_bits:
                continue
            btile = tilemap[(row, col)]
            for frow, fcol in fuse_bits:
                btile[frow][fcol] = 1
def dualmode_pins(db, tilemap, args):
    """Configure the dual-purpose configuration pins as GPIO where requested.

    For every ``--*_as_gpio`` command-line switch the matching CFG attribute is
    set to 'YES'; the others stay 'UNKNOWN'.  The 'YES' fuse positions of all
    pins are first cleared, then the requested ones are set, in every tile
    whose type carries a CFG shortval table.
    """
    # map each dual-purpose pin attribute to the command-line switch enabling it
    # (note: the argparse option is spelled --reconfign_as_gpio)
    flag_sources = {
        'JTAG_AS_GPIO': args.jtag_as_gpio,
        'SSPI_AS_GPIO': args.sspi_as_gpio,
        'MSPI_AS_GPIO': args.mspi_as_gpio,
        'DONE_AS_GPIO': args.done_as_gpio,
        'RECONFIG_AS_GPIO': args.reconfign_as_gpio,
        'READY_AS_GPIO': args.ready_as_gpio,
    }
    pin_flags = {name: 'YES' if on else 'UNKNOWN' for name, on in flag_sources.items()}
    set_attrs = set()
    clr_attrs = set()
    for name, val in pin_flags.items():
        if name not in attrids.cfg_attrids:
            print(f'XXX CFG: add {name} key handle')
            continue
        add_attr_val(db, 'CFG', set_attrs, attrids.cfg_attrids[name], attrids.cfg_attrvals[val])
        add_attr_val(db, 'CFG', clr_attrs, attrids.cfg_attrids[name], attrids.cfg_attrvals['YES'])
    # The configuration fuses are described in the ['shortval'][60] table, here
    # we are looking for cells with types that have such a table.
    cfg_type = {50, 51}
    if device in {'GW2A-18', 'GW2A-18C'}:
        cfg_type = {1, 51}
    for row, grid_row in enumerate(db.grid):
        for col, cell in enumerate(grid_row):
            if cell.ttyp not in cfg_type:
                continue
            set_bits = get_shortval_fuses(db, cell.ttyp, set_attrs, 'CFG')
            clr_bits = get_shortval_fuses(db, cell.ttyp, clr_attrs, 'CFG')
            if not clr_bits:
                continue
            btile = tilemap[(row, col)]
            # clear all 'YES' positions first, then set the requested ones
            for frow, fcol in clr_bits:
                btile[frow][fcol] = 0
            for frow, fcol in set_bits:
                btile[frow][fcol] = 1
def main():
    """Command-line entry point: pack a nextpnr JSON netlist into a Gowin
    bitstream (.fs), optionally writing a constraints file and a PNG render.

    Side effects: sets the module globals ``device``, ``pnr`` and (for the new
    P&R flow) ``is_himbaechel``; writes the output files named on the command
    line.
    """
    global device
    global pnr
    # PIL is optional; it is only needed for the --png rendering
    pil_available = True
    try:
        from PIL import Image
    except ImportError:
        pil_available = False
    parser = argparse.ArgumentParser(description='Pack Gowin bitstream')
    parser.add_argument('netlist')
    parser.add_argument('-d', '--device', required=True)
    parser.add_argument('-o', '--output', default='pack.fs')
    parser.add_argument('-c', '--compress', action='store_true')
    parser.add_argument('-s', '--cst', default = None)
    parser.add_argument('--allow_pinless_io', action = 'store_true')
    parser.add_argument('--jtag_as_gpio', action = 'store_true')
    parser.add_argument('--sspi_as_gpio', action = 'store_true')
    parser.add_argument('--mspi_as_gpio', action = 'store_true')
    parser.add_argument('--ready_as_gpio', action = 'store_true')
    parser.add_argument('--done_as_gpio', action = 'store_true')
    parser.add_argument('--reconfign_as_gpio', action = 'store_true')
    if pil_available:
        parser.add_argument('--png')
    args = parser.parse_args()
    device = args.device
    with open(args.netlist) as f:
        pnr = json.load(f)
    # check for new P&R
    if pnr['modules']['top']['settings'].get('packer.arch', '') == 'himbaechel/gowin':
        global is_himbaechel
        is_himbaechel = True
    # For tool integration it is allowed to pass a full part number
    m = re.match("GW1N(S|Z)?[A-Z]*-(LV|UV|UX)([0-9])C?([A-Z]{2}[0-9]+P?)(C[0-9]/I[0-9])", device)
    if m:
        mods = m.group(1) or ""
        luts = m.group(3)
        device = f"GW1N{mods}-{luts}"
    # BUGFIX: load the database by the (possibly shortened) device name, not by
    # the raw --device argument — a full part number has no pickle of its own,
    # which is the whole point of the shortening above.
    with importlib.resources.path('apycula', f'{device}.pickle') as path:
        with closing(gzip.open(path, 'rb')) as f:
            db = pickle.load(f)
    # BUGFIX: the VCC entry referenced the GND net name, making _vcc_net an
    # alias of _gnd_net.
    const_nets = {'GND': '$PACKER_GND_NET', 'VCC': '$PACKER_VCC_NET'}
    if is_himbaechel:
        const_nets = {'GND': '$PACKER_GND', 'VCC': '$PACKER_VCC'}
    _gnd_net = pnr['modules']['top']['netnames'].get(const_nets['GND'], {'bits': []})['bits']
    _vcc_net = pnr['modules']['top']['netnames'].get(const_nets['VCC'], {'bits': []})['bits']
    # start from an empty fuse image with the right per-tile geometry
    tilemap = chipdb.tile_bitmap(db, db.template, empty=True)
    cst = codegen.Constraints()
    pips = get_pips(pnr)
    route(db, tilemap, pips)
    bels = get_bels(pnr)
    # routing can add pass-through LUTs
    place(db, tilemap, itertools.chain(bels, _pip_bels) , cst, args)
    gsr(db, tilemap, args)
    dualmode_pins(db, tilemap, args)
    # XXX Z-1 some kind of power saving for pll, disable
    # When comparing images with a working (IDE) and non-working PLL (apicula),
    # no differences were found in the fuses of the PLL cell itself, but a
    # change in one bit in the root cell was replaced.
    # If the PLL configurations match, then the assumption has been made that this
    # bit simply disables it somehow.
    if device in {'GW1NZ-1'}:
        tile = tilemap[(db.rows - 1, db.cols - 1)]
        for row, col in {(23, 63)}:
            tile[row][col] = 0
    res = chipdb.fuse_bitmap(db, tilemap)
    header_footer(db, res, args.compress)
    if pil_available and args.png:
        bslib.display(args.png, res)
    bslib.write_bitstream(args.output, res, db.cmd_hdr, db.cmd_ftr, args.compress)
    if args.cst:
        with open(args.cst, "w") as f:
            cst.write(f)
# standard script entry point so the module can also be imported without side effects
if __name__ == '__main__':
    main()
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/gowin_pack.py
|
gowin_pack.py
|
# Numeric wire id -> symbolic wire name, as used by the vendor fuse tables.
# (A0..D7 / F* / Q* are the slice LUT inputs, LUT outputs and DFF outputs —
# see the portmaps built in chipdb.fse_luts; the N/S/E/W names appear to be
# inter-tile routing wires of various lengths — naming inferred, not verified.)
wirenames = { 0: "A0", 1: "B0", 2: "C0", 3: "D0", 4: "A1", 5: "B1", 6: "C1", 7: "D1", 8: "A2", 9: "B2", 10: "C2", 11: "D2", 12: "A3", 13: "B3", 14: "C3",
    15: "D3", 16: "A4", 17: "B4", 18: "C4", 19: "D4", 20: "A5", 21: "B5", 22: "C5", 23: "D5", 24: "A6", 25: "B6", 26: "C6", 27: "D6", 28: "A7", 29: "B7",
    30: "C7", 31: "D7", 32: "F0", 33: "F1", 34: "F2", 35: "F3", 36: "F4", 37: "F5", 38: "F6", 39: "F7", 40: "Q0", 41: "Q1", 42: "Q2", 43: "Q3", 44: "Q4",
    45: "Q5", 46: "Q6", 47: "Q7", 48: "OF0", 49: "OF1", 50: "OF2", 51: "OF3", 52: "OF4", 53: "OF5", 54: "OF6", 55: "OF7", 56: "X01", 57: "X02", 58: "X03",
    59: "X04", 60: "X05", 61: "X06", 62: "X07", 63: "X08", 64: "N100", 65: "SN10", 66: "SN20", 67: "N130", 68: "S100", 69: "S130", 70: "E100", 71: "EW10",
    72: "EW20", 73: "E130", 74: "W100", 75: "W130", 76: "N200", 77: "N210", 78: "N220", 79: "N230", 80: "N240", 81: "N250", 82: "N260", 83: "N270", 84: "S200",
    85: "S210", 86: "S220", 87: "S230", 88: "S240", 89: "S250", 90: "S260", 91: "S270", 92: "E200", 93: "E210", 94: "E220", 95: "E230", 96: "E240", 97: "E250",
    98: "E260", 99: "E270", 100: "W200", 101: "W210", 102: "W220", 103: "W230", 104: "W240", 105: "W250", 106: "W260", 107: "W270", 108: "N800", 109: "N810",
    110: "N820", 111: "N830", 112: "S800", 113: "S810", 114: "S820", 115: "S830", 116: "E800", 117: "E810", 118: "E820", 119: "E830", 120: "W800", 121: "W810",
    122: "W820", 123: "W830", 124: "CLK0", 125: "CLK1", 126: "CLK2", 127: "LSR0", 128: "LSR1", 129: "LSR2", 130: "CE0", 131: "CE1", 132: "CE2", 133: "SEL0",
    134: "SEL1", 135: "SEL2", 136: "SEL3", 137: "SEL4", 138: "SEL5", 139: "SEL6", 140: "SEL7", 141: "N101", 142: "N131", 143: "S101", 144: "S131", 145: "E101", 146: "E131",
    147: "W101", 148: "W131", 149: "N201", 150: "N211", 151: "N221", 152: "N231", 153: "N241", 154: "N251", 155: "N261", 156: "N271", 157: "S201", 158: "S211",
    159: "S221", 160: "S231", 161: "S241", 162: "S251", 163: "S261", 164: "S271", 165: "E201", 166: "E211", 167: "E221", 168: "E231", 169: "E241", 170: "E251",
    171: "E261", 172: "E271", 173: "W201", 174: "W211", 175: "W221", 176: "W231", 177: "W241", 178: "W251", 179: "W261", 180: "W271", 181: "N202", 182: "N212",
    183: "N222", 184: "N232", 185: "N242", 186: "N252", 187: "N262", 188: "N272", 189: "S202", 190: "S212", 191: "S222", 192: "S232", 193: "S242", 194: "S252",
    195: "S262", 196: "S272", 197: "E202", 198: "E212", 199: "E222", 200: "E232", 201: "E242", 202: "E252", 203: "E262", 204: "E272", 205: "W202", 206: "W212",
    207: "W222", 208: "W232", 209: "W242", 210: "W252", 211: "W262", 212: "W272", 213: "N804", 214: "N814", 215: "N824", 216: "N834", 217: "S804", 218: "S814",
    219: "S824", 220: "S834", 221: "E804", 222: "E814", 223: "E824", 224: "E834", 225: "W804", 226: "W814", 227: "W824", 228: "W834", 229: "N808", 230: "N818",
    231: "N828", 232: "N838", 233: "S808", 234: "S818", 235: "S828", 236: "S838", 237: "E808", 238: "E818", 239: "E828", 240: "E838", 241: "W808", 242: "W818",
    243: "W828", 244: "W838", 245: "E110", 246: "W110", 247: "E120", 248: "W120", 249: "S110", 250: "N110", 251: "S120", 252: "N120", 253: "E111", 254: "W111",
    255: "E121", 256: "W121", 257: "S111", 258: "N111", 259: "S121", 260: "N121", 261: "LB01", 262: "LB11", 263: "LB21", 264: "LB31", 265: "LB41", 266: "LB51",
    267: "LB61", 268: "LB71", 269: "GB00", 270: "GB10", 271: "GB20", 272: "GB30", 273: "GB40", 274: "GB50", 275: "GB60", 276: "GB70", 277: "VCC", 278: "VSS",
    279: "LT00", 280: "LT10", 281: "LT20", 282: "LT30", 283: "LT02", 284: "LT13", 285: "LT01", 286: "LT04", 287: "LBO0", 288: "LBO1", 289: "SS00", 290: "SS40",
    291: "GT00", 292: "GT10", 293: "GBO0", 294: "GBO1", 295: "DI0", 296: "DI1", 297: "DI2", 298: "DI3", 299: "DI4", 300: "DI5", 301: "DI6", 302: "DI7",
    303: "CIN0", 304: "CIN1", 305: "CIN2", 306: "CIN3", 307: "CIN4", 308: "CIN5", 309: "COUT0", 310: "COUT1", 311: "COUT2", 312: "COUT3", 313: "COUT4", 314: "COUT5"}
# ids 1001..1048: long-wire spine wires (LWSPINE* — TL/TR/BL/BR presumably the
# chip quadrant; naming inferred from the identifiers, verify against the DB)
wirenames.update({n: f"LWSPINETL{n - 1001}" for n in range(1001, 1009)})
wirenames.update({n: f"LWSPINETR{n - 1009}" for n in range(1009, 1017)})
wirenames.update({n: f"LWSPINEBL{n - 1017}" for n in range(1017, 1025)})
wirenames.update({n: f"LWSPINEBR{n - 1025}" for n in range(1025, 1033)})
wirenames.update({n: f"LWSPINEB1L{n - 1033}" for n in range(1033, 1041)})
wirenames.update({n: f"LWSPINEB1R{n - 1041}" for n in range(1041, 1049)})
# reverse lookup: symbolic name -> numeric id
wirenumbers = {v: k for k, v in wirenames.items()}
# Clock-network wire ids: start from a copy of the generic wire table and
# overlay the clock-specific names (low ids 0..47 are redefined as spines and
# long-wire taps).
clknames = wirenames.copy()
clknames.update({n: f"SPINE{n}" for n in range(32)})
clknames.update({n: f"LWT{n - 32}" for n in range(32, 40)})
clknames.update({n: f"LWB{n - 40}" for n in range(40, 48)})
# Apparently the names of the 8 primary clock wires comprise the quadrant
# number and the number of the actual clock wire: P34 stands for primary clock
# #4, 3rd quadrant. The quadrants are numbered counterclockwise:
#  2        1
#     center
#  3        4
# in addition, chips with two quadrants have quadrant numbers 3 and 4, not 1
# and 2 as you might expect.
# Wires 6 and 7 are the outputs of the dynamic 4-input MUX, the assumed
# numbers of these inputs are listed below:
clknames.update({
    48: 'P16A', 49: 'P16B', 50: 'P16C', 51: 'P16D',
    52: 'P17A', 53: 'P17B', 54: 'P17C', 55: 'P17D',
    56: 'P26A', 57: 'P26B', 58: 'P26C', 59: 'P26D',
    60: 'P27A', 61: 'P27B', 62: 'P27C', 63: 'P27D',
    64: 'P36A', 65: 'P36B', 66: 'P36C', 67: 'P36D',
    68: 'P37A', 69: 'P37B', 70: 'P37C', 71: 'P37D',
    72: 'P46A', 73: 'P46B', 74: 'P46C', 75: 'P46D',
    76: 'P47A', 77: 'P47B', 78: 'P47C', 79: 'P47D'
    })
clknames[80] = 'VSS'
# each PLL has 4 delay-critical outputs (clkout, clkoutp, clkoutd, clkoutd3),
# their numbers are listed here, the names indicate the possible location of
# the PLL (Top Left etc):
clknames.update({
    81: 'TLPLL0CLK0', 82: 'TLPLL0CLK1', 83: 'TLPLL0CLK2', 84: 'TLPLL0CLK3',
    85: 'TLPLL1CLK0', 86: 'TLPLL1CLK1', 87: 'TLPLL1CLK2', 88: 'TLPLL1CLK3',
    89: 'BLPLL0CLK0', 90: 'BLPLL0CLK1', 91: 'BLPLL0CLK2', 92: 'BLPLL0CLK3',
    93: 'TRPLL0CLK0', 94: 'TRPLL0CLK1', 95: 'TRPLL0CLK2', 96: 'TRPLL0CLK3',
    97: 'TRPLL1CLK0', 98: 'TRPLL1CLK1', 99: 'TRPLL1CLK2', 100: 'TRPLL1CLK3',
    101: 'BRPLL0CLK0', 102: 'BRPLL0CLK1', 103: 'BRPLL0CLK2', 104: 'BRPLL0CLK3',
    })
# ids with no identified function yet
clknames.update({n: f"UNK{n}" for n in range(105, 121)})
# These are the external clock pins, one on each side
clknames.update({
    121: 'PCLKT0', 122: 'PCLKT1', 123: 'PCLKB0', 124: 'PCLKB1',
    125: 'PCLKL0', 126: 'PCLKL1', 127: 'PCLKR0', 128: 'PCLKR1',
    })
clknames.update({
    129: 'TRBDCLK0', 130: 'TRBDCLK1', 131: 'TRBDCLK2', 132: 'TRBDCLK3',
    133: 'TLBDCLK1', 134: 'TLBDCLK2', 135: 'TLBDCLK3', 136: 'TLBDCLK0',
    137: 'BRBDCLK2', 138: 'BRBDCLK3', 139: 'BRBDCLK0', 140: 'BRBDCLK1',
    141: 'BLBDCLK3', 142: 'BLBDCLK0', 143: 'BLBDCLK1', 144: 'BLBDCLK2',
    145: 'TRMDCLK0', 146: 'TLMDCLK0', 147: 'BRMDCLK0', 148: 'BLMDCLK0',
    149: 'BLMDCLK1', 150: 'BRMDCLK1', 151: 'TLMDCLK1', 152: 'TRMDCLK1',
    })
clknames[153] = 'VCC'
clknames.update({n: f"UNK{n}" for n in range(154, 170)})
# HCLK?
clknames.update({
    170: 'TBDHCLK0', 171: 'TBDHCLK1', 172: 'TBDHCLK2', 173: 'TBDHCLK3', 174: 'BBDHCLK0',
    175: 'BBDHCLK1', 176: 'BBDHCLK2', 177: 'BBDHCLK3', 178: 'LBDHCLK0', 179: 'LBDHCLK1',
    180: 'LBDHCLK2', 181: 'LBDHCLK3', 182: 'RBDHCLK0', 183: 'RBDHCLK1', 184: 'RBDHCLK2',
    185: 'RBDHCLK3'
    })
# These wires are a mystery, they are a copy of P10-P15 etc, there is no reason
# to have another number for the output, but it is these numbers that are
# listed in tables 38, although the internal routes are routed to the
# originals.
# In general they are needed and the letter A is added to make the names
# different.
clknames.update({
    186: 'P10A', 187: 'P11A', 188: 'P12A', 189: 'P13A', 190: 'P14A', 191: 'P15A',
    192: 'P20A', 193: 'P21A', 194: 'P22A', 195: 'P23A', 196: 'P24A', 197: 'P25A',
    198: 'P30A', 199: 'P31A', 200: 'P32A', 201: 'P33A', 202: 'P34A', 203: 'P35A',
    204: 'P40A', 205: 'P41A', 206: 'P42A', 207: 'P43A', 208: 'P44A', 209: 'P45A',
    })
clknames.update({n: f"UNK{n}" for n in range(210, 261)})
# reverse lookup: symbolic clock wire name -> numeric id
clknumbers = {v: k for k, v in clknames.items()}
# hclk
# HCLK wire ids: a copy of the clock table with the low ids redefined for the
# HCLK in/out taps (ids 0..25 become HCLK-specific).
hclknames = clknames.copy()
hclknames.update({n: f"HCLK_UNK{n}" for n in range(26)})
# inputs
hclknames.update({
    2: 'HCLK_IN0', 3: 'HCLK_IN1', 4: 'HCLK_IN2', 5: 'HCLK_IN3'
    })
# outputs
hclknames.update({
    10: 'HCLK_OUT0', 11: 'HCLK_OUT1', 12: 'HCLK_OUT2', 13: 'HCLK_OUT3'
    })
# these work as inputs in GW1N-9c
hclknames.update({
    16: 'HCLK_9IN0', 17: 'HCLK_9IN1', 18: 'HCLK_9IN2', 19: 'HCLK_9IN3'
    })
# reverse lookup: symbolic HCLK wire name -> numeric id
hclknumbers = {v: k for k, v in hclknames.items()}
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/wirenames.py
|
wirenames.py
|
import sys
import numpy as np
import random
def rint(f, w):
    """Read *w* bytes from stream *f* and decode them as a little-endian
    signed integer."""
    raw = f.read(w)
    return int.from_bytes(raw, 'little', signed=True)
def readFse(f):
    """Parse a vendor .fse data file opened in binary mode.

    Returns {'header': header_record, tile_type: record, ...} where each
    record comes from readOneFile().  The stream starts with a check word,
    then the header, then tile records until the 0x9a1d85 terminator.
    """
    print("check", rint(f, 4))
    tiles = {}
    tiles['header'] = readOneFile(f, rint(f, 4))
    # tile records follow until the end-of-file marker word
    for ttyp in iter(lambda: rint(f, 4), 0x9a1d85):
        tiles[ttyp] = readOneFile(f, ttyp)
    return tiles
def readTable(f, size1, size2, w=2):
    """Read a size1 x size2 table of w-byte little-endian signed ints."""
    rows = []
    for _ in range(size1):
        rows.append([rint(f, w) for _ in range(size2)])
    return rows
def readOneFile(f, fuselength):
    """Read one record (header or tile) of the .fse file.

    f: binary stream positioned at the record start.
    fuselength: number of columns of the 'fuse' table rows (the caller passes
        the tile type id it just read).
    Returns {'height': .., 'width': .., table_kind: {table_id: rows}}.
    Raises ValueError on an unknown table type id.
    """
    tmap = {"height": rint(f, 4),
            "width": rint(f, 4)}
    tables = rint(f, 4)
    for i in range(tables):
        typ = rint(f, 4)
        size = rint(f, 4)
        # print(hex(f.tell()), " Table type", typ, "of size", size)
        if typ == 61:
            # the chip grid of tile types; has its own column count
            size2 = rint(f, 4)
            typn = "grid"
            t = readTable(f, size, size2, 4)
        elif typ == 1:
            # fuse-number -> per-tile-type bit position table
            typn = "fuse"
            t = readTable(f, size, fuselength, 2)
        elif typ in {7, 8, 9, 10, 0xb, 0xc, 0xd, 0xe, 0xf, 0x10,
                0x27, 0x31, 0x34, 0x37, 0x39, 0x3b, 0x3e, 0x3f,
                0x41, 0x43, 0x46, 0x48, 0x4a, 0x4c, 0x4e, 0x51, 0x53}:
            typn = "logicinfo"
            t = readTable(f, size, 3, 2)
        elif typ in {2, 0x26, 0x30}:
            # routing PIPs: src, dest + fuse list
            typn = "wire"
            t = readTable(f, size, 8, 2)
        elif typ == 3:
            typn = "wiresearch"
            t = readTable(f, size, 3, 2)
        elif typ in {5, 0x11, 0x14, 0x15, 0x16, 0x19, 0x1a, 0x1b,
                0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23,
                0x24, 0x32, 0x33, 0x38, 0x3c, 0x40, 0x42, 0x44,
                0x47, 0x49, 0x4b, 0x4d, 0x4f, 0x50, 0x52, 0x54}:
            # two feature keys + up to 12 fuses per row
            typn = "shortval"
            t = readTable(f, size, 14, 2)
        elif typ in {6, 0x45}:
            typn = "alonenode"
            t = readTable(f, size, 15, 2)
        elif typ in {0x12, 0x13, 0x35, 0x36, 0x3a}:
            typn = "longfuse"
            t = readTable(f, size, 17, 2)
        elif typ in {0x17, 0x18, 0x25, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f}:
            # sixteen feature keys + fuses per row
            typn = "longval"
            t = readTable(f, size, 28, 2)
        elif typ == 4:
            typn = "const"
            t = readTable(f, size, 1, 2)
        else:
            raise ValueError("Unknown type {} at {}".format(hex(typ), hex(f.tell())))
        tmap.setdefault(typn, {})[typ] = t
    return tmap
def render_tile(d, ttyp):
    """Render the fuse positions of one tile type as a small byte image.

    Used only for visual debugging: each known fuse position is painted with
    a value derived from the table (sub)type that owns it, so overlapping
    tables become visible.  Returns an (height, width) uint8 array.
    """
    w = d[ttyp]['width']
    h = d[ttyp]['height']
    tile = np.zeros((h, w), np.uint8)#+(255-ttyp)
    # (start, table): column where the fuse numbers begin in each table kind
    for start, table in [(2, 'shortval'), (2, 'wire'), (16, 'longval'),
                         (1, 'longfuse'), (0, 'const')]:
        if table in d[ttyp]:
            for styp, sinfo in d[ttyp][table].items():
                for i in sinfo:
                    for fuse in i[start:]:
                        if fuse > 0:
                            # fuse number -> encoded row*100+col bit position
                            num = d['header']['fuse'][1][fuse][ttyp]
                            row = num // 100
                            col = num % 100
                            if table == "wire":
                                if i[0] > 0:
                                    # blend overlapping wire fuses so collisions show up
                                    if tile[row][col] == 0:
                                        tile[row][col] = (styp + i[1]) % 256
                                    else:
                                        tile[row][col] = (tile[row][col] + (styp + i[1]) % 256) // 2
                            elif table == "shortval" and styp == 5:
                                assert tile[row][col] == 0
                                tile[row][col] = (styp + i[0]) % 256
                            else:
                                tile[row][col] = styp
    return tile
def render_bitmap(d):
    """Render the whole chip as a tile-type map.

    Every tile area of the image is filled with its tile-type number; useful
    only for visual debugging.  Returns an (height, width) uint8 array whose
    size is the sum of the individual tile sizes.
    """
    grid = d['header']['grid'][61]
    total_w = sum(d[t]['width'] for t in grid[0])
    total_h = sum(d[grid_row[0]]['height'] for grid_row in grid)
    bitmap = np.zeros((total_h, total_w), np.uint8)
    y = 0
    for grid_row in grid:
        x = 0
        for typ in grid_row:
            h, w = d[typ]['height'], d[typ]['width']
            # paint the tile's rectangle with its type number
            bitmap[y:y + h, x:x + w] = typ
            x += w
        y += h
    return bitmap
def display(fname, data):
    """Turn a 2D byte array into a palettized PIL image.

    Saves the image to *fname* when it is truthy and returns the Image.
    Requires Pillow (imported lazily so the rest of the module works without
    it).
    """
    from PIL import Image
    img = Image.frombytes(mode='P', size=data.shape[::-1], data=data)
    # fixed seed -> deterministic pseudo-random palette, stable across runs
    random.seed(123)
    palette = random.choices(range(256), k=3*256)
    img.putpalette(palette)
    if fname:
        img.save(fname)
    return img
def fuse_lookup(d, ttyp, fuse):
    """Map a fuse number to its (row, col) bit position inside tile type
    *ttyp*.  Returns None for negative (padding) fuse ids.

    The header fuse table stores positions encoded as row * 100 + col.
    """
    if fuse < 0:
        return None
    num = d['header']['fuse'][1][fuse][ttyp]
    return divmod(num, 100)
def tile_bitmap(d, bitmap, empty=False):
    """Cut a whole-chip fuse *bitmap* into per-tile views.

    Returns {(row_idx, col_idx, tile_type): 2D slice}.  Tiles with no set
    bits are skipped unless *empty* is True.  The slices are numpy views into
    *bitmap*, not copies, so writing to them mutates the original image.

    (Cleanup: dropped the unused width/height totals the original computed.)
    """
    tiles = d['header']['grid'][61]
    res = {}
    y = 0
    for idx, row in enumerate(tiles):
        x = 0
        for jdx, typ in enumerate(row):
            td = d[typ]
            w = td['width']
            h = td['height']
            tile = bitmap[y:y+h, x:x+w]
            if tile.any() or empty:
                res[(idx, jdx, typ)] = tile
            x += w
        y += h
    return res
def fuse_bitmap(d, bitmap):
    """Reassemble a whole-chip fuse image from per-tile arrays.

    *bitmap* is the {(row_idx, col_idx, tile_type): 2D array} mapping produced
    by tile_bitmap(); every tile must be present.  Returns one uint8 array of
    the full chip size.
    """
    grid = d['header']['grid'][61]
    total_w = sum(d[t]['width'] for t in grid[0])
    total_h = sum(d[r[0]]['height'] for r in grid)
    res = np.zeros((total_h, total_w), dtype=np.uint8)
    y = 0
    for idx, grid_row in enumerate(grid):
        x = 0
        for jdx, typ in enumerate(grid_row):
            h, w = d[typ]['height'], d[typ]['width']
            res[y:y + h, x:x + w] = bitmap[(idx, jdx, typ)]
            x += w
        y += h
    return res
def parse_tile(d, ttyp, tile):
    """Decode a tile's fuse image back into the table rows that produced it.

    For each table kind, groups the rows by their key columns, collects the
    expected bit positions, and reports a row when *all* of its bits match
    the image.  Returns {table_kind: {table_id: [matched rows]}} where each
    matched row is key-tuple + the ((row, col), expected) pairs.
    """
    w = d[ttyp]['width']
    h = d[ttyp]['height']
    res = {}
    # (start, table): column where the fuse numbers begin in each table kind
    for start, table in [(2, 'shortval'), (2, 'wire'), (16, 'longval'),
                         (1, 'longfuse'), (0, 'const')]:
        if table in d[ttyp]: # skip missing entries
            for subtyp, tablerows in d[ttyp][table].items():
                items = {}
                for row in tablerows:
                    # a non-positive first key marks bits expected to be 0
                    pos = row[0] > 0
                    # coords is a set of ((row, col), expected-bit) pairs;
                    # dict.update() on it turns them into {coord: expected}
                    coords = {(fuse_lookup(d, ttyp, f), pos) for f in row[start:] if f > 0}
                    idx = tuple(abs(attr) for attr in row[:start])
                    items.setdefault(idx, {}).update(coords)
                #print(items)
                for idx, item in items.items():
                    # the row matches only if every one of its bits agrees
                    test = [tile[loc[0]][loc[1]] == val
                            for loc, val in item.items()]
                    if all(test):
                        row = idx + tuple(item.keys())
                        res.setdefault(table, {}).setdefault(subtyp, []).append(row)
    return res
def scan_fuses(d, ttyp, tile):
    """Return the fuse numbers whose bit position is set in *tile*.

    Fuse numbers <= 100 are excluded (same cutoff as the original code).
    Rewritten as a single pass over the fuse table instead of one full pass
    per set bit; also drops the unused width/height locals.
    """
    # positions of all set bits, normalized to plain int pairs
    set_bits = {(int(r), int(c)) for r, c in zip(*np.where(tile == 1))}
    fuses = set()
    for fnum, fuse in enumerate(d['header']['fuse'][1]):
        if fnum <= 100:
            continue
        # bit positions are encoded as row * 100 + col
        num = fuse[ttyp]
        if (num // 100, num % 100) in set_bits:
            fuses.add(fnum)
    return fuses
def scan_tables(d, tiletyp, fuses):
    """Find and report every table row of tile type *tiletyp* that mentions
    any fuse in *fuses*.  Prints each hit and returns the matched rows."""
    matches = []
    for tname, tables in d[tiletyp].items():
        # the dimension entries hold plain ints, not tables
        if tname in {"width", "height"}:
            continue
        for ttyp, table in tables.items():
            for row in table:
                row_fuses = fuses.intersection(row)
                if not row_fuses:
                    continue
                print(f"fuses {row_fuses} found in {tname}({ttyp}): {row}")
                matches.append(row)
    return matches
def reduce_rows(rows, fuses, start=16, tries=1000):
    """Randomized greedy reduction of a feature set.

    rows: table rows whose feature ids occupy the prefix up to the first 0
        and whose fuse numbers start at column *start*, padded with -1.
    fuses: the complete fuse set that must stay covered.
    Returns a (locally) minimal set of features that still covers *fuses*.

    Bugfix: random.sample() no longer accepts a set population (removed in
    Python 3.11), so the candidate is now picked from a tuple; the loop also
    stops early once no features remain instead of raising.
    """
    # {feature ids of a row: fuse numbers of that row}
    rowmap = {frozenset(iv[:iv.index(0)]): frozenset(iv[start:(list(iv)+[-1]).index(-1)]) for iv in rows}
    features = {i for s in rowmap.keys() for i in s}
    for _ in range(tries):
        if not features:
            break
        feat = random.choice(tuple(features))
        features.remove(feat)
        rem_fuses = set()
        for row_feats, row_fuses in rowmap.items():
            if row_feats & features:
                rem_fuses.update(row_fuses)
        if rem_fuses != fuses:
            # removing this feature lost coverage; put it back
            features.add(feat)
    return features
if __name__ == "__main__":
with open(sys.argv[1], 'rb') as f:
d = readFse(f)
bm = render_bitmap(d)
display("fuse.png", bm)
t = render_tile(d, 12)
display("tile.png", t)
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/fuse_h4x.py
|
fuse_h4x.py
|
from dataclasses import dataclass, field
from typing import Dict, List, Set, Tuple, Union, ByteString, Any
from itertools import chain
import re
import copy
from functools import reduce
from collections import namedtuple
import numpy as np
import apycula.fuse_h4x as fuse
from apycula.wirenames import wirenames, clknames, clknumbers, hclknames, hclknumbers
from apycula import pindef
# the character that marks the I/O attributes that come from the nextpnr
mode_attr_sep = '&'
# represents a row, column coordinate
# can be either tiles or bits within tiles
Coord = Tuple[int, int]
@dataclass
class Bel:
    """Represents a Basic ELement
    with the specified modes mapped to bits
    and the specified portmap"""
    # there can be zero or more flags
    flags: Dict[Union[int, str], Set[Coord]] = field(default_factory=dict)
    # this Bel is IOBUF and needs routing to become IBUF or OBUF
    simplified_iob: bool = field(default = False)
    # differential signal capabilities info
    is_diff: bool = field(default = False)
    is_true_lvds: bool = field(default = False)
    is_diff_p: bool = field(default = False)
    # there can be only one mode, modes are exclusive
    modes: Dict[Union[int, str], Set[Coord]] = field(default_factory=dict)
    # logical port name -> wire name within the tile
    portmap: Dict[str, str] = field(default_factory=dict)
    @property
    def mode_bits(self):
        # union of the fuse coordinates of every mode of this bel
        return set().union(*self.modes.values())
@dataclass
class Tile:
    """Represents all the configurable features
    for this specific tile type"""
    # tile dimensions in fuse-bitmap cells
    width: int
    height: int
    # At the time of packing/unpacking the information about the types of cells
    # is already lost, it is critical to work through the 'logicinfo' table so
    # store it.
    ttyp: int
    # a mapping from dest, source wire to bit coordinates
    pips: Dict[str, Dict[str, Set[Coord]]] = field(default_factory=dict)
    clock_pips: Dict[str, Dict[str, Set[Coord]]] = field(default_factory=dict)
    # XXX Since Himbaechel uses a system of nodes instead of aliases for clock
    # wires, at first we would like to avoid mixing in a bunch of PIPs of
    # different nature.
    pure_clock_pips: Dict[str, Dict[str, Set[Coord]]] = field(default_factory=dict)
    # fuses to disable the long wire columns. This is the table 'alonenode[6]' in the vendor file
    # {dst: ({src}, {bits})}
    alonenode_6: Dict[str, Tuple[Set[str], Set[Coord]]] = field(default_factory=dict)
    # always-connected dest, src aliases
    aliases: Dict[str, str] = field(default_factory=dict)
    # a mapping from bel type to bel
    bels: Dict[str, Bel] = field(default_factory=dict)
@dataclass
class Device:
    """The full chip description as stored in the per-device pickle:
    the tile grid plus every global table needed for packing/unpacking."""
    # a grid of tiles
    grid: List[List[Tile]] = field(default_factory=list)
    timing: Dict[str, Dict[str, List[float]]] = field(default_factory=dict)
    packages: Dict[str, Tuple[str, str, str]] = field(default_factory=dict)
    # {variant: {package: {pin#: (pin_name, [cfgs])}}}
    pinout: Dict[str, Dict[str, Dict[str, Tuple[str, List[str]]]]] = field(default_factory=dict)
    pin_bank: Dict[str, int] = field(default_factory = dict)
    # raw bitstream header/footer command words
    cmd_hdr: List[ByteString] = field(default_factory=list)
    cmd_ftr: List[ByteString] = field(default_factory=list)
    template: np.ndarray = None
    # allowable values of bel attributes
    # {table_name: [(attr_id, attr_value)]}
    logicinfo: Dict[str, List[Tuple[int, int]]] = field(default_factory=dict)
    # fuses for a pair of the "features" (or pairs of parameter values)
    # {ttype: {table_name: {(feature_A, feature_B): {bits}}}
    shortval: Dict[int, Dict[str, Dict[Tuple[int, int], Set[Coord]]]] = field(default_factory=dict)
    # fuses for 16 of the "features"
    # {ttype: {table_name: {(feature_0, feature_1, ..., feature_15): {bits}}}
    longval: Dict[int, Dict[str, Dict[Tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int], Set[Coord]]]] = field(default_factory=dict)
    # always-connected dest, src aliases
    aliases: Dict[Tuple[int, int, str], Tuple[int, int, str]] = field(default_factory=dict)
    # for Himbaechel arch
    # nodes - always connected wires {node_name: (wire_type, {(row, col, wire_name)})}
    nodes: Dict[str, Tuple[str, Set[Tuple[int, int, str]]]] = field(default_factory = dict)
    # strange bottom row IO. In order for OBUF and Co. to work, one of the four
    # combinations must be applied to two special wires.
    # (wire_a, wire_b, [(wire_a_net, wire_b_net)])
    bottom_io: Tuple[str, str, List[Tuple[str, str]]] = field(default_factory = tuple)
    # simplified IO rows
    simplio_rows: Set[int] = field(default_factory = set)
    # tile types by func. The same ttyp number can correspond to different
    # functional blocks on different chips. For example 86 is the PLL head ttyp
    # for GW2A-18 and the same number is used in GW1N-1 where it has nothing to
    # do with PLL. { type_name: {type_num} }
    tile_types: Dict[str, Set[int]] = field(default_factory = dict)
    # supported differential IO primitives
    diff_io_types: List[str] = field(default_factory = list)
    # HCLK pips depend on the location of the cell, not on the type, so they
    # are difficult to match with the deduplicated description of the tile
    # { (y, x) : pips}
    hclk_pips: Dict[Tuple[int, int], Dict[str, Dict[str, Set[Coord]]]] = field(default_factory=dict)
    # extra cell functions besides main type like
    # - OSCx
    # - GSR
    # - OSER16/IDES16
    # - ref to hclk_pips
    # - disabled blocks
    extra_func: Dict[Tuple[int, int], Dict[str, Any]] = field(default_factory=dict)
    # grid size in tiles
    @property
    def rows(self):
        return len(self.grid)
    @property
    def cols(self):
        return len(self.grid[0])
    # chip size in fuse-bitmap cells (sums of the per-tile dimensions)
    @property
    def height(self):
        return sum(row[0].height for row in self.grid)
    @property
    def width(self):
        return sum(tile.width for tile in self.grid[0])
    # XXX consider removing
    @property
    def corners(self):
        # { (row, col) : bank# }
        return {
            (0, 0) : '0',
            (0, self.cols - 1) : '1',
            (self.rows - 1, self.cols - 1) : '2',
            (self.rows - 1, 0) : '3'}
    # Some chips have bits responsible for different banks in the same corner tile.
    # Here stores the correspondence of the bank number to the (row, col) of the tile.
    @property
    def bank_tiles(self):
        # { bank# : (row, col) }
        res = {}
        for pos in self.corners.keys():
            row, col = pos
            for bel in self.grid[row][col].bels.keys():
                if bel[0:4] == 'BANK':
                    res.update({ bel[4:] : pos })
        return res
def unpad(fuses, pad=-1):
    """Return *fuses* truncated at the first occurrence of the *pad* value;
    the whole sequence if no padding is present."""
    if pad in fuses:
        return fuses[:fuses.index(pad)]
    return fuses
def fse_pips(fse, ttyp, table=2, wn=wirenames):
    """Extract the PIPs of one 'wire' table of tile type *ttyp*.

    Returns {dest_name: {src_name: {fuse bit coords}}}.  Ids without a name
    in *wn* are kept as their decimal string.
    """
    pips = {}
    rows = fse[ttyp]['wire'].get(table)
    if rows is None:
        return pips
    for srcid, destid, *fuse_ids in rows:
        if srcid < 0:
            # a negative source id marks an entry whose fuses are ignored
            srcid = -srcid
            bits = set()
        else:
            bits = {fuse.fuse_lookup(fse, ttyp, f) for f in unpad(fuse_ids)}
        src = wn.get(srcid, str(srcid))
        dest = wn.get(destid, str(destid))
        pips.setdefault(dest, {})[src] = bits
    return pips
# HCLK wires that the toolchain currently knows how to handle; PIPs touching
# anything else are filtered out in fse_hclk_pips().
_supported_hclk_wires = {'SPINE2', 'SPINE3', 'SPINE4', 'SPINE5', 'SPINE10', 'SPINE11',
                         'SPINE12', 'SPINE13', 'SPINE16', 'SPINE17', 'SPINE18', 'SPINE19',
                         'VSS', 'VCC', 'PCLKT0', 'PCLKT1', 'PCLKB0', 'PCLKB1',
                         'PCLKL0', 'PCLKL1','PCLKR0', 'PCLKR1',
                         'TBDHCLK0', 'TBDHCLK1', 'TBDHCLK2', 'TBDHCLK3', 'BBDHCLK0',
                         'BBDHCLK1', 'BBDHCLK2', 'BBDHCLK3', 'LBDHCLK0', 'LBDHCLK1',
                         'LBDHCLK2', 'LBDHCLK3', 'RBDHCLK0', 'RBDHCLK1', 'RBDHCLK2',
                         'RBDHCLK3',
                         }
# Some chips at least -9C treat these wires as the same
_xxx_hclk_wires = {'SPINE16': 'SPINE2', 'SPINE18': 'SPINE4'}
def fse_hclk_pips(fse, ttyp, aliases):
    """Extract the HCLK PIPs (wire table 48) of tile type *ttyp*.

    Only PIPs between wires in _supported_hclk_wires are kept; wires that
    some chips treat as identical are recorded in *aliases* (mutated in
    place).  Returns {dest: {src: {fuse bit coords}}}.
    """
    res = {}
    for dest, src_fuses in fse_pips(fse, ttyp, table = 48, wn = clknames).items():
        if dest not in _supported_hclk_wires:
            continue
        for src, bits in src_fuses.items():
            if src in _supported_hclk_wires:
                res.setdefault(dest, {})[src] = bits
            if src in _xxx_hclk_wires:
                aliases[src] = _xxx_hclk_wires[src]
    return res
def fse_alonenode(fse, ttyp, table = 6):
    """Extract the 'alonenode' table (long-wire column disconnect fuses).

    Each row holds a destination id, source ids (padded with -1) and two
    trailing fuse numbers.  Returns {dest_name: ({src_names}, {fuse coords})}.
    """
    res = {}
    rows = fse[ttyp].get('alonenode', {}).get(table, [])
    for destid, *rest in rows:
        src_ids, fuse_ids = rest[:-2], rest[-2:]
        bits = {fuse.fuse_lookup(fse, ttyp, f) for f in unpad(fuse_ids)}
        srcs = {wirenames.get(sid, str(sid)) for sid in unpad(src_ids)}
        res[wirenames.get(destid, str(destid))] = (srcs, bits)
    return res
# make PLL bels
def fse_pll(device, fse, ttyp):
    """Create the PLL bel for tile type *ttyp* on *device*, if any.

    Returns {} when this device/tile-type combination hosts no PLL head.
    """
    # (device set, tile types, bel name); the first matching rule wins
    rules = (
        ({'GW1N-1', 'GW1NZ-1'}, {88}, 'RPLLA'),
        ({'GW1N-1', 'GW1NZ-1'}, {89}, 'RPLLB'),
        ({'GW1NS-2'}, {87}, 'RPLLA'),
        ({'GW1NS-4'}, {88, 89}, 'PLLVR'),
        ({'GW1N-4'}, {74, 77}, 'RPLLA'),
        ({'GW1N-4'}, {75, 78}, 'RPLLB'),
        ({'GW1N-9C', 'GW1N-9'}, {86, 87}, 'RPLLA'),
        ({'GW1N-9C', 'GW1N-9'}, {74, 75, 76, 77, 78, 79}, 'RPLLB'),
        ({'GW2A-18', 'GW2A-18C'}, {42, 45}, 'RPLLA'),
        ({'GW2A-18', 'GW2A-18C'}, {74, 75, 76, 77, 78, 79}, 'RPLLB'),
    )
    bels = {}
    for devices, ttypes, name in rules:
        if device in devices and ttyp in ttypes:
            bels.setdefault(name, Bel())
            break
    return bels
# add the ALU mode
# new_mode_bits: string like "0110000010011010"
def add_alu_mode(base_mode, modes, lut, new_alu_mode, new_mode_bits):
    """Register an ALU mode: its fuse set is the common *base_mode* fuses
    plus the LUT INIT flag fuses for every '0' bit of *new_mode_bits*
    (bit i of the string corresponds to lut.flags[15 - i])."""
    bits = modes.setdefault(new_alu_mode, set())
    bits |= base_mode
    for pos, bit in enumerate(new_mode_bits):
        if bit != '0':
            continue
        bits.update(lut.flags[15 - pos])
# also make DFFs, ALUs and shadow RAM
def fse_luts(fse, ttyp):
    """Build the slice bels (LUTs, DFFs, ALUs, shadow RAM16) of tile type
    *ttyp* from the vendor shortval tables.  Returns {bel_name: Bel}."""
    # shortval table 5 holds the LUT INIT bit fuses: (lut#, bit#, fuse)
    data = fse[ttyp]['shortval'][5]
    luts = {}
    for lutn, bit, *fuses in data:
        coord = fuse.fuse_lookup(fse, ttyp, fuses[0])
        bel = luts.setdefault(f"LUT{lutn}", Bel())
        bel.flags[bit] = {coord}
    # dicts are in insertion order
    for num, lut in enumerate(luts.values()):
        lut.portmap = {
            'F': f"F{num}",
            'I0': f"A{num}",
            'I1': f"B{num}",
            'I2': f"C{num}",
            'I3': f"D{num}",
        }
    # main fuse: enable two ALUs in the slice
    # shortval(25/26/27) [1, 0, fuses]
    for cls, fuse_idx in enumerate([25, 26, 27]):
        try:
            data = fse[ttyp]['shortval'][fuse_idx]
        except KeyError:
            # this tile type has no such slice class
            continue
        for i in range(2):
            # DFF
            bel = luts.setdefault(f"DFF{cls * 2 + i}", Bel())
            bel.portmap = {
                # D inputs hardwired to LUT F
                'Q' : f"Q{cls * 2 + i}",
                'CLK': f"CLK{cls}",
                'LSR': f"LSR{cls}", # set/reset
                'CE' : f"CE{cls}", # clock enable
            }
            # ALU
            alu_idx = cls * 2 + i
            bel = luts.setdefault(f"ALU{alu_idx}", Bel())
            # fuses common to every ALU mode (the [1, 0] row of the table)
            mode = set()
            for key0, key1, *fuses in data:
                if key0 == 1 and key1 == 0:
                    for f in (f for f in fuses if f != -1):
                        coord = fuse.fuse_lookup(fse, ttyp, f)
                        mode.update({coord})
                    break
            lut = luts[f"LUT{alu_idx}"]
            # ADD INIT="0011 0000 1100 1100"
            # add 0 add carry
            add_alu_mode(mode, bel.modes, lut, "0", "0011000011001100")
            # SUB INIT="1010 0000 0101 1010"
            # add 0 add carry
            add_alu_mode(mode, bel.modes, lut, "1", "1010000001011010")
            # ADDSUB INIT="0110 0000 1001 1010"
            # add 0 sub carry
            add_alu_mode(mode, bel.modes, lut, "2", "0110000010011010")
            add_alu_mode(mode, bel.modes, lut, "hadder", "1111000000000000")
            # NE INIT="1001 0000 1001 1111"
            # add 0 sub carry
            add_alu_mode(mode, bel.modes, lut, "3", "1001000010011111")
            # GE
            add_alu_mode(mode, bel.modes, lut, "4", "1001000010011010")
            # LE
            # no mode, just swap I0 and I1
            # CUP
            add_alu_mode(mode, bel.modes, lut, "6", "1010000010100000")
            # CDN
            add_alu_mode(mode, bel.modes, lut, "7", "0101000001011111")
            # CUPCDN
            # The functionality of this seems to be the same with SUB
            # add_alu_mode(mode, bel.modes, lut, "8", "1010000001011010")
            # MULT INIT="0111 1000 1000 1000"
            #
            add_alu_mode(mode, bel.modes, lut, "9", "0111100010001000")
            # CIN->LOGIC INIT="0000 0000 0000 0000"
            # nop 0 nop carry
            # side effect: clears the carry
            add_alu_mode(mode, bel.modes, lut, "C2L", "0000000000000000")
            # 1->CIN INIT="0000 0000 0000 1111"
            # nop 0 nop carry
            add_alu_mode(mode, bel.modes, lut, "ONE2C", "0000000000001111")
            bel.portmap = {
                'COUT': f"COUT{alu_idx}",
                'CIN': f"CIN{alu_idx}",
                'SUM': f"F{alu_idx}",
                'I0': f"A{alu_idx}",
                'I1': f"B{alu_idx}",
                'I3': f"D{alu_idx}",
            }
    # main fuse: enable shadow SRAM in the slice
    # shortval(28) [2, 0, fuses]
    if 28 in fse[ttyp]['shortval']:
        # the first three slice classes carry the RAM mode fuses of their DFFs
        for i in range(6):
            bel = luts.setdefault(f"DFF{i}", Bel())
            mode = bel.modes.setdefault("RAM", set())
            for key0, key1, *fuses in fse[ttyp]['shortval'][25+i//2]:
                if key0 < 0:
                    for f in fuses:
                        if f == -1: break
                        coord = fuse.fuse_lookup(fse, ttyp, f)
                        mode.add(coord)
        bel = luts.setdefault(f"RAM16", Bel())
        mode = bel.modes.setdefault("0", set())
        for key0, key1, *fuses in fse[ttyp]['shortval'][28]:
            if key0 == 2 and key1 == 0:
                for f in fuses:
                    if f == -1: break
                    coord = fuse.fuse_lookup(fse, ttyp, f)
                    mode.add(coord)
        bel.portmap = {
            'DI': ("A5", "B5", "C5", "D5"),
            'CLK': "CLK2",
            'WRE': "LSR2",
            'WAD': ("A4", "B4", "C4", "D4"),
            'RAD': tuple(tuple(f"{j}{i}" for i in range(4)) for j in ["A", "B", "C", "D"]),
            'DO': ("F0", "F1", "F2", "F3"),
        }
    return luts
def fse_osc(device, fse, ttyp):
    """Return the oscillator bel for the given device.

    Each device family uses a differently named oscillator primitive;
    unsupported devices raise an Exception.  The portmap is left empty —
    only the bel's presence is recorded here.
    """
    # device -> oscillator primitive name
    osc_primitive = {
        'GW1N-4':   'OSC',
        'GW1N-9':   'OSC',
        'GW1N-9C':  'OSC',
        'GW2A-18':  'OSC',
        'GW2A-18C': 'OSC',
        'GW1NZ-1':  'OSCZ',
        'GW1NS-4':  'OSCZ',
        'GW1NS-2':  'OSCF',
        'GW1N-1':   'OSCH',
        'GW2AN-18': 'OSCW',
        'GW1N-2':   'OSCO',
    }
    if device not in osc_primitive:
        raise Exception(f"Oscillator not yet supported on {device}")
    osc = {}
    bel = osc.setdefault(osc_primitive[device], Bel())
    bel.portmap = {}
    return osc
def set_banks(fse, db):
    """Register BANK# bels on the four corner tiles of the grid.

    Bank numbers are taken from the first field of each record in longval
    table 37 of the corner tile's type.
    """
    last_row = db.rows - 1
    last_col = db.cols - 1
    corners = ((0, 0), (0, last_col), (last_row, 0), (last_row, last_col))
    for row, col in corners:
        ttyp = fse['header']['grid'][61][row][col]
        longval = fse[ttyp].get('longval', {})
        if 37 in longval:
            for rec in longval[37]:
                db.grid[row][col].bels.setdefault(f"BANK{rec[0]}", Bel())
# Human-readable names for the fse['header']['logicinfo'] table indices.
# Indices not listed here are stored under "unknown_<index>" by
# fse_fill_logic_tables().
_known_logic_tables = {
        8: 'DCS',
        9: 'GSR',
       10: 'IOLOGIC',
       11: 'IOB',
       12: 'SLICE',
       13: 'BSRAM',
       14: 'DSP',
       15: 'PLL',
       59: 'CFG',
       62: 'OSC',
       63: 'USB',
}
# Human-readable names for the per-tile-type 'shortval'/'longval' table
# indices.  Indices not listed here are stored under "unknown_<index>" by
# fse_fill_logic_tables().
_known_tables = {
        4: 'CONST',
        5: 'LUT',
       20: 'GSR',
       21: 'IOLOGICA',
       22: 'IOLOGICB',
       23: 'IOBA',
       24: 'IOBB',
       25: 'CLS0',
       26: 'CLS1',
       27: 'CLS2',
       28: 'CLS3',
       35: 'PLL',
       37: 'BANK',
       40: 'IOBC',
       41: 'IOBD',
       42: 'IOBE',
       43: 'IOBF',
       44: 'IOBG',
       45: 'IOBH',
       46: 'IOBI',
       47: 'IOBJ',
       51: 'OSC',
       53: 'DLLDEL0',
       54: 'DLLDEL1',
       56: 'DLL0',
       60: 'CFG',
       64: 'USB',
       66: 'EFLASH',
       68: 'ADC',
       80: 'DLL1',
       82: 'POWERSAVE',
}
def fse_fill_logic_tables(dev, fse):
    """Copy the attribute/fuse tables from the parsed fse into the chip db.

    Fills dev.logicinfo from the global header table, and dev.shortval /
    dev.longval from the per-tile-type tables, translating raw fuse numbers
    into coordinates via fuse.fuse_lookup().
    """
    # logicinfo: global (attribute, value) pair tables
    for ltable, entries in fse['header']['logicinfo'].items():
        name = _known_logic_tables.get(ltable, f"unknown_{ltable}")
        table = dev.logicinfo.setdefault(name, [])
        for attr, val, _ in entries:
            table.append((attr, val))
    # shortval/longval: per tile type, keyed by attribute tuples
    ttypes = {t for grid_row in fse['header']['grid'][61] for t in grid_row}
    for ttyp in ttypes:
        if 'shortval' in fse[ttyp]:
            ttyp_rec = dev.shortval.setdefault(ttyp, {})
            for stable, rows in fse[ttyp]['shortval'].items():
                table = ttyp_rec.setdefault(_known_tables.get(stable, f"unknown_{stable}"), {})
                # each row: two attribute keys followed by a padded fuse list
                for key_a, key_b, *fuses in rows:
                    table[(key_a, key_b)] = {fuse.fuse_lookup(fse, ttyp, f) for f in unpad(fuses)}
        if 'longval' in fse[ttyp]:
            ttyp_rec = dev.longval.setdefault(ttyp, {})
            for ltable, rows in fse[ttyp]['longval'].items():
                table = ttyp_rec.setdefault(_known_tables.get(ltable, f"unknown_{ltable}"), {})
                # each row: sixteen attribute keys followed by a padded fuse list
                for rec in rows:
                    table[tuple(rec[:16])] = {fuse.fuse_lookup(fse, ttyp, f) for f in unpad(rec[16:])}
# HCLK input wire name -> offset into dat['CmuxIns'] (added to base 90 in
# fse_create_hclk_aliases).  The T/B/L/R prefix is the chip edge, the final
# digit is the HCLK number on that edge.
_hclk_in = {
    'TBDHCLK0': 0, 'TBDHCLK1': 1, 'TBDHCLK2': 2, 'TBDHCLK3': 3,
    'BBDHCLK0': 4, 'BBDHCLK1': 5, 'BBDHCLK2': 6, 'BBDHCLK3': 7,
    'LBDHCLK0': 8, 'LBDHCLK1': 9, 'LBDHCLK2': 10, 'LBDHCLK3': 11,
    'RBDHCLK0': 12, 'RBDHCLK1': 13, 'RBDHCLK2': 14, 'RBDHCLK3': 15}
def fse_create_hclk_aliases(db, device, dat):
    """Wire up the HCLK clock inputs and the HCLK->FCLK routes.

    First every HCLK input wire (TBDHCLK0 etc) found among the clock pips is
    aliased to its real source cell/wire taken from dat['CmuxIns'].  Then,
    per device and per chip edge, 'FCLK' pseudo-pips are added to the edge
    cells and aliased to the HCLK spines driving them.  All coordinates are
    empirical per-die constants.
    """
    for row in range(db.rows):
        for col in range(db.cols):
            for src_fuses in db.grid[row][col].clock_pips.values():
                for src in src_fuses.keys():
                    if src in _hclk_in.keys():
                        # dat['CmuxIns'] coordinates are 1-based, hence the -1
                        source = dat['CmuxIns'][str(90 + _hclk_in[src])]
                        db.aliases[(row, col, src)] = (source[0] - 1, source[1] - 1, wirenames[source[2]])
    # hclk->fclk
    # NOTE(review): in several branches below clock_pips['FCLK'] is assigned
    # twice in a row ({'HCLK0': {}} then {'HCLK1': {}}); the second assignment
    # replaces the first, so only HCLK1 survives as a pip source although both
    # aliases are registered.  Confirm whether a single dict with both sources
    # was intended.
    # top
    row = 0
    if device == 'GW1N-1':
        for col in range(1, db.cols - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'CLK2': {}}
    elif device in {'GW1NZ-1'}:
        for col in range(1, 10):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (0, 5, 'SPINE10')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (0, 5, 'SPINE12')
        for col in range(10, db.cols - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (0, 5, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (0, 5, 'SPINE13')
    elif device in {'GW1N-4'}:
        for col in range(1, db.cols - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'CLK2': {}}
    elif device in {'GW1NS-4'}:
        for col in range(1, 11):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (row, 18, 'SPINE10')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (row, 18, 'SPINE12')
        for col in range(11, db.cols - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (row, 18, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (row, 18, 'SPINE13')
    elif device in {'GW1N-9'}:
        for col in range(1, 28):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (row, 0, 'SPINE10')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (row, db.cols - 1, 'SPINE12')
        for col in range(28, db.cols - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (row, 0, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (row, db.cols - 1, 'SPINE13')
    elif device in {'GW1N-9C'}:
        for col in range(1, db.cols - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (0, db.cols - 1, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (0, db.cols - 1, 'SPINE13')
    # right
    col = db.cols - 1
    if device == 'GW1N-1':
        for row in range(1, db.rows - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'CLK2': {}}
    elif device in {'GW1NZ-1'}:
        for row in range(1, 5):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (5, col, 'SPINE10')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (5, col, 'SPINE12')
        for row in range(6, db.rows - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (5, col, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (5, col, 'SPINE13')
    elif device in {'GW1N-4'}:
        # rows 8-11 get no CLK2: the IDE refuses IOLOGIC in mid-side cells
        for row in range(1, db.rows - 1):
            if row not in {8, 9, 10, 11}:
                db.grid[row][col].clock_pips['FCLK'] = {'CLK2': {}}
        for row in range(1, 9):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (9, col, 'SPINE12')
        for row in range(10, db.rows - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (9, col, 'SPINE13')
    elif device in {'GW1NS-4'}:
        for row in range(1, 9):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (9, col, 'SPINE10')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (9, col, 'SPINE12')
        for row in range(9, db.rows - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (9, col, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (9, col, 'SPINE13')
    elif device in {'GW1N-9'}:
        for row in range(1, 19):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (18, col, 'SPINE10')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (18, col, 'SPINE12')
        for row in range(19, db.rows - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (18, col, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (18, col, 'SPINE13')
    elif device in {'GW1N-9C'}:
        for row in range(1, db.rows - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (18, col, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (18, col, 'SPINE13')
    # left
    col = 0
    if device == 'GW1N-1':
        for row in range(1, db.rows - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'CLK2': {}}
    elif device in {'GW1N-4'}:
        for row in range(1, db.rows - 1):
            if row not in {8, 9, 10, 11}:
                db.grid[row][col].clock_pips['FCLK'] = {'CLK2': {}}
        for row in range(1, 9):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (9, col, 'SPINE12')
        for row in range(10, db.rows - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (9, col, 'SPINE13')
    elif device in {'GW1N-9'}:
        for row in range(1, 19):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (18, col, 'SPINE10')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (18, col, 'SPINE12')
        for row in range(19, db.rows - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (18, col, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (18, col, 'SPINE13')
    elif device in {'GW1N-9C'}:
        for row in range(1, db.rows - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (18, 0, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (18, 0, 'SPINE13')
    # bottom
    row = db.rows - 1
    if device == 'GW1N-1':
        for col in range(1, 10):
            if col not in {8, 9}:
                db.grid[row][col].clock_pips['FCLK'] = {'CLK2': {}}
                db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
                db.aliases[(row, col, 'HCLK1')] = (row, db.cols -1, 'SPINE12')
        for col in range(10, db.cols - 1):
            if col not in {10, 11}:
                db.grid[row][col].clock_pips['FCLK'] = {'CLK2': {}}
                db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
                db.aliases[(row, col, 'HCLK1')] = (row, db.cols - 1, 'SPINE13')
    elif device in {'GW1N-4'}:
        for col in range(1, 19):
            if col not in {17, 18}:
                db.grid[row][col].clock_pips['FCLK'] = {'CLK2': {}}
                db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
                db.aliases[(row, col, 'HCLK1')] = (row, db.cols -1, 'SPINE12')
        for col in range(19, db.cols - 1):
            if col not in {19, 20}:
                db.grid[row][col].clock_pips['FCLK'] = {'CLK2': {}}
                db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
                db.aliases[(row, col, 'HCLK1')] = (row, db.cols - 1, 'SPINE13')
    elif device in {'GW1NS-4'}:
        db.aliases[(row, 17, 'SPINE2')] = (row, 16, 'SPINE2')
        for col in range(1, 16):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (row, 17, 'SPINE10')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (row, 20, 'SPINE12')
        for col in range(21, db.cols - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (row, 17, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (row, 20, 'SPINE13')
    elif device in {'GW1N-9'}:
        for col in range(1, 28):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (row, 0, 'SPINE10')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (row, db.cols - 1, 'SPINE12')
        for col in range(28, db.cols - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (row, 0, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (row, db.cols - 1, 'SPINE13')
    elif device in {'GW1N-9C'}:
        for col in range(1, db.cols - 1):
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK0': {}}
            db.aliases[(row, col, 'HCLK0')] = (row, 0, 'SPINE11')
            db.grid[row][col].clock_pips['FCLK'] = {'HCLK1': {}}
            db.aliases[(row, col, 'HCLK1')] = (row, db.cols - 1, 'SPINE13')
# HCLK for Himbaechel
#
# hclk - locs of hclk control this side. The location of the HCLK is determined
# by the presence of table 48 in the 'wire' table of the cell. If there is
# such a table, then there are fuses for managing HCLK muxes. HCLK affiliation
# is determined empirically by comparing an empty image and an image with one
# OSER4 located on the side of the chip of interest.
#
# edges - how cells along this side can connect to hclk.
# Usually a specific HCLK is responsible for the nearest half side of the chip,
# but sometimes the IDE refuses to put IOLOGIC in one or two cells in the
# middle of the side, do not specify such cells as controlled by HCLK.
#
# CLK2/HCLK_OUT# - These are determined by putting two OSER4s in the same IO
# with different FCLK networks - this will force the IDE to use two ways to
# provide fast clocks to the primitives in the same cell. What exactly was used
# is determined by the fuses used and table 2 of this cell (if CLK2 was used)
# or table 48 of the HCLK responsible for this half (we already know which of
# the previous paragraphs)
# Per-device description of the HCLK -> FCLK fast-clock network, one entry
# per chip side (T/B/L/R):
#   'hclk'  - (row, col) cells holding the HCLK control muxes of that side
#   'edges' - (first, last+1) column/row ranges along the side mapped to the
#             set of sources ('CLK2' and/or 'HCLK_OUT#') available there
_hclk_to_fclk = {
    'GW1N-1': {
        'B': {
             'hclk': {(10, 0), (10, 19)},
             'edges': {
                 ( 1, 10) : {'CLK2', 'HCLK_OUT2'},
                 (10, 19) : {'CLK2', 'HCLK_OUT3'},
                 },
             },
        'T': {
             'edges': {
                 ( 1, 19) : {'CLK2'},
                 },
             },
        'L': {
             'edges': {
                 ( 1, 10) : {'CLK2'},
                 },
             },
        'R': {
             'edges': {
                 ( 1, 10) : {'CLK2'},
                 },
             },
        },
    'GW1NZ-1': {
        'T': {
             'hclk': {(0, 5)},
             'edges': {
                 ( 1, 10) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (10, 19) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'R': {
             'hclk': {(5, 19)},
             'edges': {
                 ( 1, 5)  : {'HCLK_OUT0', 'HCLK_OUT2'},
                 ( 6, 10) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        },
    'GW1NS-2': {
        'B': {
             'hclk': {(14, 0), (14, 19)},
             'edges': {
                 ( 1, 10) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (10, 19) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'T': {
             'hclk': {(0, 0), (0, 19)},
             'edges': {
                 ( 1, 10) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (10, 19) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'L': {
             'hclk': {(5, 0)},
             'edges': {
                 ( 1, 5)  : {'HCLK_OUT0', 'HCLK_OUT2'},
                 ( 6, 14) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'R': {
             'hclk': {(5, 19)},
             'edges': {
                 ( 1, 5)  : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (6, 14)  : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        },
    'GW1N-4': {
        'B': {
             'hclk': {(19, 0), (19, 37)},
             'edges': {
                 ( 1, 19) : {'CLK2', 'HCLK_OUT2'},
                 (19, 37) : {'CLK2', 'HCLK_OUT3'},
                 },
             },
        'T': {
             'edges': {
                 ( 1, 37) : {'CLK2'},
                 },
             },
        'L': {
             'hclk': {(9, 0)},
             'edges': {
                 ( 1, 9)  : {'CLK2', 'HCLK_OUT2'},
                 (10, 19) : {'CLK2', 'HCLK_OUT3'},
                 },
             },
        'R': {
             'hclk': {(9, 37)},
             'edges': {
                 ( 1, 9)  : {'CLK2', 'HCLK_OUT2'},
                 (10, 19) : {'CLK2', 'HCLK_OUT3'},
                 },
             },
        },
    'GW1NS-4': {
        'B': {
             'hclk': {(19, 16), (19, 17), (19, 20)},
             'edges': {
                 ( 1, 16) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (21, 37) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'T': {
             'hclk': {(0, 18)},
             'edges': {
                 ( 1, 10) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (10, 37) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'R': {
             'hclk': {(9, 37)},
             'edges': {
                 ( 1, 9)  : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (9, 19)  : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        },
    'GW1N-9': {
        'B': {
             'hclk': {(28, 0), (28, 46)},
             'edges': {
                 ( 1, 28) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (28, 46) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'T': {
             'hclk': {(0, 0), (0, 46)},
             'edges': {
                 ( 1, 28) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (28, 46) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'L': {
             'hclk': {(18, 0)},
             'edges': {
                 ( 1, 19) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (19, 28) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'R': {
             'hclk': {(18, 46)},
             'edges': {
                 ( 1, 19) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (19, 28) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        },
    'GW1N-9C': {
        'B': {
             'hclk': {(28, 0), (28, 46)},
             'edges': {
                 ( 1, 46) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'T': {
             'hclk': {(0, 0), (0, 46)},
             'edges': {
                 ( 1, 46) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'L': {
             'hclk': {(18, 0)},
             'edges': {
                 ( 1, 28) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'R': {
             'hclk': {(18, 46)},
             'edges': {
                 ( 1, 28) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        },
    'GW2A-18': {
        'B': {
             'hclk': {(54, 27), (54, 28)},
             'edges': {
                 ( 1, 27) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (29, 55) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'T': {
             'hclk': {(0, 27), (0, 28)},
             'edges': {
                 ( 1, 27) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (29, 55) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'L': {
             'hclk': {(27, 0)},
             'edges': {
                 ( 1, 27) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (28, 55) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'R': {
             'hclk': {(27, 55)},
             'edges': {
                 ( 1, 27) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (28, 55) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        },
    'GW2A-18C': {
        'B': {
             'hclk': {(54, 27), (54, 28)},
             'edges': {
                 ( 1, 27) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (29, 55) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'T': {
             'hclk': {(0, 27), (0, 28)},
             'edges': {
                 ( 1, 27) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (29, 55) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'L': {
             'hclk': {(27, 0)},
             'edges': {
                 ( 1, 27) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (28, 55) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        'R': {
             'hclk': {(27, 55)},
             'edges': {
                 ( 1, 27) : {'HCLK_OUT0', 'HCLK_OUT2'},
                 (28, 55) : {'HCLK_OUT1', 'HCLK_OUT3'},
                 },
             },
        },
}
# Prefixes of wires (clock pins and PLL outputs) that enter the global clock
# network; HCLK mux inputs starting with one of these become global nodes.
_global_wire_prefixes = {'PCLK', 'TBDHCLK', 'BBDHCLK', 'RBDHCLK', 'LBDHCLK',
                         'TLPLL', 'TRPLL', 'BLPLL', 'BRPLL'}
def _hclk_fclk_pips(dev, row, col, srcs, hclks):
    """Attach every source in srcs as a pip into FCLKA/FCLKB of the IO logic
    at (row, col); HCLK spine sources are also added to the per-side node
    sets in hclks.  Cells without IOLOGICA are skipped."""
    if 'IOLOGICA' not in dev.grid[row][col].bels:
        return
    pips = dev.hclk_pips.setdefault((row, col), {})
    for dst in 'AB':
        for src in srcs:
            pips.setdefault(f'FCLK{dst}', {}).update({src: set()})
            if src.startswith('HCLK'):
                hclks[src].add((row, col, src))

def fse_create_hclk_nodes(dev, device, fse, dat):
    """Create the HCLK Himbaechel nodes and the HCLK->FCLK pips.

    Uses the empirical per-device tables in _hclk_to_fclk; devices without
    an entry are silently skipped.
    """
    # XXX
    if device not in _hclk_to_fclk:
        return
    hclk_info = _hclk_to_fclk[device]
    # entries to the HCLK from logic.  Hoisted out of the side loop: it
    # depends only on dat, and re-running it per side just re-added the
    # same wires to the same node sets.
    for hclk_idx, row, col, wire_idx in {(i, dat['CmuxIns'][str(i - 80)][0] - 1, dat['CmuxIns'][str(i - 80)][1] - 1, dat['CmuxIns'][str(i - 80)][2]) for i in range(hclknumbers['TBDHCLK0'], hclknumbers['RBDHCLK3'] + 1)}:
        if row != -2:
            dev.nodes.setdefault(hclknames[hclk_idx], ("HCLK", set()))[1].add((row, col, wirenames[wire_idx]))
    for side in 'BRTL':
        if side not in hclk_info:
            continue
        # create HCLK nodes
        hclks = {}
        if 'hclk' in hclk_info[side]:
            # create HCLK cells pips
            for hclk_loc in hclk_info[side]['hclk']:
                row, col = hclk_loc
                ttyp = fse['header']['grid'][61][row][col]
                dev.hclk_pips[(row, col)] = fse_pips(fse, ttyp, table = 48, wn = hclknames)
                # connect local wires like PCLKT0 etc to the global nodes
                for srcs in dev.hclk_pips[(row, col)].values():
                    for src in srcs.keys():
                        for pfx in _global_wire_prefixes:
                            if src.startswith(pfx):
                                dev.nodes.setdefault(src, ('HCLK', set()))[1].add((row, col, src))
                # strange GW1N-9C input-input aliases
                for i in {0, 2}:
                    dev.nodes.setdefault(f'X{col}Y{row}/HCLK9-{i}', ('HCLK', {(row, col, f'HCLK_IN{i}')}))[1].add((row, col, f'HCLK_9IN{i}'))
            for i in range(4):
                hnam = f'HCLK_OUT{i}'
                wires = dev.nodes.setdefault(f'{side}{hnam}', ("HCLK", set()))[1]
                hclks[hnam] = wires
                for hclk_loc in hclk_info[side]['hclk']:
                    row, col = hclk_loc
                    wires.add((row, col, hnam))
        # create pips from HCLK spines to FCLK inputs of IO logic
        for edge, srcs in hclk_info[side]['edges'].items():
            if side in 'TB':
                row = {'T': 0, 'B': dev.rows - 1}[side]
                for col in range(edge[0], edge[1]):
                    _hclk_fclk_pips(dev, row, col, srcs, hclks)
            else:
                col = {'L': 0, 'R': dev.cols - 1}[side]
                for row in range(edge[0], edge[1]):
                    _hclk_fclk_pips(dev, row, col, srcs, hclks)
# Location (row, col, wire) of the four PLL clock outputs CLK0-CLK3 for every
# supported device; consumed by fse_create_pll_clock_aliases().  The key name
# encodes the PLL corner: TL/TR/BL/BR.
_pll_loc = {
 'GW1N-1':
   {'TRPLL0CLK0': (0, 17, 'F4'), 'TRPLL0CLK1': (0, 17, 'F5'),
    'TRPLL0CLK2': (0, 17, 'F6'), 'TRPLL0CLK3': (0, 17, 'F7'), },
 'GW1NZ-1':
   {'TRPLL0CLK0': (0, 17, 'F4'), 'TRPLL0CLK1': (0, 17, 'F5'),
    'TRPLL0CLK2': (0, 17, 'F6'), 'TRPLL0CLK3': (0, 17, 'F7'), },
 'GW1NS-2':
   {'TRPLL0CLK0': (5, 19, 'F4'), 'TRPLL0CLK1': (5, 19, 'F7'),
    'TRPLL0CLK2': (5, 19, 'F5'), 'TRPLL0CLK3': (5, 19, 'F6'), },
 'GW1N-4':
   {'TLPLL0CLK0': (0, 9, 'F4'), 'TLPLL0CLK1': (0, 9, 'F7'),
    'TLPLL0CLK2': (0, 9, 'F6'), 'TLPLL0CLK3': (0, 9, 'F5'),
    'TRPLL0CLK0': (0, 27, 'F4'), 'TRPLL0CLK1': (0, 27, 'F7'),
    'TRPLL0CLK2': (0, 27, 'F6'), 'TRPLL0CLK3': (0, 27, 'F5'), },
 'GW1NS-4':
   {'TLPLL0CLK0': (0, 27, 'F4'), 'TLPLL0CLK1': (0, 27, 'F7'),
    'TLPLL0CLK2': (0, 27, 'F6'), 'TLPLL0CLK3': (0, 27, 'F5'),
    'TRPLL0CLK0': (0, 36, 'F4'), 'TRPLL0CLK1': (0, 36, 'F7'),
    'TRPLL0CLK2': (0, 36, 'F6'), 'TRPLL0CLK3': (0, 36, 'F5'), },
 'GW1N-9C':
   {'TLPLL0CLK0': (9, 2, 'F4'), 'TLPLL0CLK1': (9, 2, 'F7'),
    'TLPLL0CLK2': (9, 2, 'F5'), 'TLPLL0CLK3': (9, 2, 'F6'),
    'TRPLL0CLK0': (9, 44, 'F4'), 'TRPLL0CLK1': (9, 44, 'F7'),
    'TRPLL0CLK2': (9, 44, 'F5'), 'TRPLL0CLK3': (9, 44, 'F6'), },
 'GW1N-9':
   {'TLPLL0CLK0': (9, 2, 'F4'), 'TLPLL0CLK1': (9, 2, 'F7'),
    'TLPLL0CLK2': (9, 2, 'F5'), 'TLPLL0CLK3': (9, 2, 'F6'),
    'TRPLL0CLK0': (9, 44, 'F4'), 'TRPLL0CLK1': (9, 44, 'F7'),
    'TRPLL0CLK2': (9, 44, 'F5'), 'TRPLL0CLK3': (9, 44, 'F6'), },
 'GW2A-18':
   {'TLPLL0CLK0': (9, 2, 'F4'), 'TLPLL0CLK1': (9, 2, 'F7'),
    'TLPLL0CLK2': (9, 2, 'F5'), 'TLPLL0CLK3': (9, 2, 'F6'),
    'TRPLL0CLK0': (9, 53, 'F4'), 'TRPLL0CLK1': (9, 53, 'F7'),
    'TRPLL0CLK2': (9, 53, 'F5'), 'TRPLL0CLK3': (9, 53, 'F6'),
    'BLPLL0CLK0': (45, 2, 'F4'), 'BLPLL0CLK1': (45, 2, 'F7'),
    'BLPLL0CLK2': (45, 2, 'F5'), 'BLPLL0CLK3': (45, 2, 'F6'),
    'BRPLL0CLK0': (45, 53, 'F4'), 'BRPLL0CLK1': (45, 53, 'F7'),
    'BRPLL0CLK2': (45, 53, 'F5'), 'BRPLL0CLK3': (45, 53, 'F6'), },
 'GW2A-18C':
   {'TLPLL0CLK0': (9, 2, 'F4'), 'TLPLL0CLK1': (9, 2, 'F7'),
    'TLPLL0CLK2': (9, 2, 'F5'), 'TLPLL0CLK3': (9, 2, 'F6'),
    'TRPLL0CLK0': (9, 53, 'F4'), 'TRPLL0CLK1': (9, 53, 'F7'),
    'TRPLL0CLK2': (9, 53, 'F5'), 'TRPLL0CLK3': (9, 53, 'F6'),
    'BLPLL0CLK0': (45, 2, 'F4'), 'BLPLL0CLK1': (45, 2, 'F7'),
    'BLPLL0CLK2': (45, 2, 'F5'), 'BLPLL0CLK3': (45, 2, 'F6'),
    'BRPLL0CLK0': (45, 53, 'F4'), 'BRPLL0CLK1': (45, 53, 'F7'),
    'BRPLL0CLK2': (45, 53, 'F5'), 'BRPLL0CLK3': (45, 53, 'F6'), },
}
def fse_create_pll_clock_aliases(db, device):
    """Alias PLL output clock wires to their true locations and create the
    corresponding global PLL_O Himbaechel nodes.

    The PLL positions are the fixed per-device constants in _pll_loc.
    Fix: test membership against _pll_loc itself instead of a hard-coded
    copy of its key set (the two had to be kept in sync by hand and could
    silently drift when a new device is added).
    """
    # we know exactly where the PLL is and therefore know which aliases to create
    for row in range(db.rows):
        for col in range(db.cols):
            for w_dst, w_srcs in db.grid[row][col].clock_pips.items():
                for w_src in w_srcs.keys():
                    if device in _pll_loc and w_src in _pll_loc[device]:
                        db.aliases[(row, col, w_src)] = _pll_loc[device][w_src]
                        # Himbaechel node
                        db.nodes.setdefault(w_src, ("PLL_O", set()))[1].add((row, col, w_src))
            # Himbaechel HCLK
            if (row, col) in db.hclk_pips:
                for w_srcs in db.hclk_pips[row, col].values():
                    for w_src in w_srcs.keys():
                        if device in _pll_loc and w_src in _pll_loc[device]:
                            db.nodes.setdefault(w_src, ("PLL_O", set()))[1].add((row, col, w_src))
# from Gowin Programmable IO (GPIO) User Guide:
#
# IOL6 and IOR6 pins of devices of GW1N-1, GW1NR-1, GW1NZ-1, GW1NS-2,
# GW1NS-2C, GW1NSR-2C, GW1NSR-2 and GW1NSE-2C do not support IO logic.
# IOT2 and IOT3A pins of GW1N-2, GW1NR-2, GW1N-1P5, GW1N-2B, GW1N-1P5B,
# GW1NR-2B devices do not support IO logic.
# IOL10 and IOR10 pins of the devices of GW1N-4, GW1N-4B, GW1NR-4, GW1NR-4B,
# ==========================================================================
# These are cells along the edges of the chip and their types are taken from
# fse['header']['grid'][61][row][col] and it was checked whether or not the IDE
# would allow placing IOLOGIC there.
def fse_iologic(device, fse, ttyp):
    """Return the IOLOGIC bels available in a tile of type ttyp.

    Tile types whose IO cells have no IO logic yield an empty dict; some
    tile types additionally carry the 16-bit OSER16/IDES16 primitives.
    """
    bels = {}
    # these IO cells have no iologic at all
    if ttyp in {48, 49, 50, 51}:
        return bels
    if device in {'GW1N-1', 'GW1NZ-1', 'GW1NS-2', 'GW1N-4', 'GW1NS-4'} and ttyp in {86, 87}:
        return bels
    if device == 'GW1NS-4' and ttyp in {86, 87, 135, 136, 137, 138}:
        return bels
    # shortval tables 21/22 carry the fuses for the A/B IOLOGIC halves
    shortval = fse[ttyp].get('shortval', {})
    if 21 in shortval:
        bels['IOLOGICA'] = Bel()
    if 22 in shortval:
        bels['IOLOGICB'] = Bel()
    # 16-bit (de)serialization primitives
    if (device == 'GW1NS-4' and ttyp in {142, 143, 144, 58, 59}) \
            or (device in {'GW1N-9', 'GW1N-9C'} and ttyp in {52, 66, 63, 91, 92}):
        bels['OSER16'] = Bel()
        bels['IDES16'] = Bel()
    return bels
# create clock aliases
# to understand how the clock works in gowin, it is useful to read the experiments of Pepijndevos
# https://github.com/YosysHQ/apicula/blob/master/clock_experiments.ipynb
# especially since I was deriving everything based on that information.
# It is impossible to get rid of fuzzing, the difference is that I do it
# manually to check the observed patterns and assumptions, and then
# programmatically fix the found formulas.
# We have 8 clocks, which are divided into two parts: 0-3 and 4-7. They are
# located in pairs: 0 and 4, 1 and 5, 2 and 6, 3 and 7. From here it is enough
# to consider only the location of wires 0-3.
# So tap_start describes along which column the wire of a particular clock is located.
# This is derived from the Out[26] table (see
# https://github.com/YosysHQ/apicula/blob/master/clock_experiments.ipynb)
# The index in [1, 0, 3, 2] is the relative position of tap (hence tap_start)
# in the four column space.
# tap column 0 -> clock #1
# tap column 1 -> clock #0
# tap column 2 -> clock #3
# tap column 3 -> clock #2
# Out[26] also implies the repeatability of the columns, here it is fixed as a formula:
# (tap column) % 4 -> clock #
# for example 6 % 4 -> clock #3
# If you look closely at Out[26], then we can say that the formula breaks
# starting from a certain column number. But it's not. Recall that we have at
# least two quadrants located horizontally and at some point there is a
# transition to another quadrant and these four element parts must be counted
# from a new beginning.
# To determine where the left quadrant ends, look at dat['center'] - the
# coordinates of the "central" cell of the chip are stored there. The number of
# the column indicated there is the last column of the left quadrant.
# It is enough to empirically determine the correspondence of clocks and
# columns in the new quadrant (even three clocks is enough, since the fourth
# becomes obvious).
# [3, 2, 1, 0] turned out to be the unwritten standard for all the chips studied.
# We're not done with that yet - what matters is how the columns of each
# quadrant end.
# For GW1N-1 dat['center'] = [6, 10]
# From Out[26]: 5: {4, 5, 6, 7, 8, 9}, why is the 5th column responsible not
# for four, but for so many columns, including the end of the quadrant, column
# 9 (we have a 0 based system, remember)?
# We cannot answer this question, but based on observations we can formulate a
# rule: after the tap-column there must be a place for one more column,
# otherwise all columns are assigned to the previous one. Let's see Out[26]:
# 5: {4, 5, 6, 7, 8, 9} can't use column 9 because there is no space for one more
# 8: {7, 8, 9} ok, although not a complete four, we are at a sufficient distance from column 9
# 7: {6, 7, 8, 9} ok, full four
# 6: {5, 6, 7, 8, 9}, can't use column 10 - wrong quadrant
# 'quads': {( 6, 0, 11, 2, 3)}
# 6 - row of spine->tap
# 0, 11 - segment is located between these rows
# 2, 3 - this is the simplest - left and right quadrant numbers.
# The quadrants are numbered like this:
# 1 | 0
# ------ moreover, two-quadrant chips have only quadrants 2 and 3
# 2 | 3
# Determining the boundary between vertical quadrants and even which line
# contains spine->tap is not as easy as determining the vertical boundary
# between segments. This is done empirically by placing a test DFF along the
# column until the moment of changing the row of muxes is caught.
#
# A bit about the nature of Central (Clock?) mux: wherever there is
# ['wire'][38] some clocks are switched somewhere. That is, this is such a huge
# mux spread over the chip, and this is how we describe it for nextpnr - the
# wires of the same name involved in some kind of switching anywhere in the
# chip are combined into one Himbaechel node. Further, when routing, there is
# already a choice of which pip to use and which cell.
# It also follows that for Himbaechel the clock wires should not be mixed
# together with any other wires. At least I came to this conclusion and that
# is why the HCLK wires, which have the same numbers as the clock spines, are
# stored separately.
# dat['CmuxIns'] and 80 - here, the places of entry points into the clock
# system are stored in the form [row, col, wire], that is, in order to send a
# signal for propagation through the global clock network, you need to send it
# to this particular wire in this cell. In most cases it will not be possible
# to connect to this wire as they are basically outputs (IO output, PLL output
# etc).
# Let's look at the dat['CmuxIns'] fragment for GW1N-1. We know that this board
# has an external clock generator connected to the IOR5A pin and this is one of
# the PCLKR clock wires (R is for right here). We see that this is index 47,
# and index 48 belongs to another pin on the same side of the chip. If we
# consider the used fuses from the ['wire'][38] table on the simplest example,
# we will see that 47 corresponds to the PCLKR0 wire, whose index in the
# clknames table (irenames.py) is 127.
# For lack of a better way, we assume that the indexes in the dat['CmuxIns']
# table are the wire numbers in clknames minus 80.
# We check on a couple of other chips and leave it that way. This is neither the
# best nor the worst method in the absence of documentation about the internal
# structure of the chip.
# 38 [-1, -1, -1]
# 39 [-1, -1, -1]
# 40 [-1, -1, -1]
# 41 [-1, -1, -1]
# 42 [-1, -1, -1]
# 43 [11, 10, 38]
# 44 [11, 11, 38]
# 45 [5, 1, 38]
# 46 [7, 1, 38]
# 47 [5, 20, 38] <== IOR5A (because of 38 = F6)
# 48 [7, 20, 38]
# 49 [1, 11, 124]
# 50 [1, 11, 125]
# 51 [6, 20, 124]
# Empirical clock-network geometry per device (see the long explanation in
# the comment block above):
#   'tap_start' - relative tap column of each clock pair, for the left and
#                 the right quadrant halves respectively
#   'quads'     - (spine->tap row, first row, last row, left quadrant #,
#                 right quadrant #) per vertical quadrant pair
_clock_data = {
    'GW1N-1': { 'tap_start': [[1, 0, 3, 2], [3, 2, 1, 0]], 'quads': {( 6, 0, 11, 2, 3)}},
    'GW1NZ-1': { 'tap_start': [[1, 0, 3, 2], [3, 2, 1, 0]], 'quads': {( 6, 0, 11, 2, 3)}},
    'GW1NS-2': { 'tap_start': [[1, 0, 3, 2], [3, 2, 1, 0]], 'quads': {( 6, 0, 15, 2, 3)}},
    'GW1N-4': { 'tap_start': [[2, 1, 0, 3], [3, 2, 1, 0]], 'quads': {(10, 0, 20, 2, 3)}},
    'GW1NS-4': { 'tap_start': [[2, 1, 0, 3], [3, 2, 1, 0]], 'quads': {(10, 0, 20, 2, 3)}},
    'GW1N-9': { 'tap_start': [[3, 2, 1, 0], [3, 2, 1, 0]], 'quads': {( 1, 0, 10, 1, 0), (19, 10, 29, 2, 3)}},
    'GW1N-9C': { 'tap_start': [[3, 2, 1, 0], [3, 2, 1, 0]], 'quads': {( 1, 0, 10, 1, 0), (19, 10, 29, 2, 3)}},
    'GW2A-18': { 'tap_start': [[3, 2, 1, 0], [3, 2, 1, 0]], 'quads': {(10, 0, 28, 1, 0), (46, 28, 55, 2, 3)}},
    'GW2A-18C': { 'tap_start': [[3, 2, 1, 0], [3, 2, 1, 0]], 'quads': {(10, 0, 28, 1, 0), (46, 28, 55, 2, 3)}},
}
def fse_create_clocks(dev, device, dat, fse):
    """Create the global clock Himbaechel nodes (spines, taps, branches).

    Entry points into the clock system come from dat['CmuxIns']; tap
    columns and quadrant boundaries come from the empirical tables in
    _clock_data (see the large comment block above _clock_data).
    """
    # dat['center'] coordinates are 1-based
    center_col = dat['center'][1] - 1
    clkpin_wires = {}  # NOTE(review): appears unused in this function
    taps = {}
    # find center muxes
    for clk_idx, row, col, wire_idx in {(i, dat['CmuxIns'][str(i - 80)][0] - 1, dat['CmuxIns'][str(i - 80)][1] - 1, dat['CmuxIns'][str(i - 80)][2]) for i in range(clknumbers['PCLKT0'], clknumbers['PCLKR1'] + 1)}:
        if row != -2:
            dev.nodes.setdefault(clknames[clk_idx], ("GLOBAL_CLK", set()))[1].add((row, col, wirenames[wire_idx]))
    # merge all SPINE wires (and whatever feeds them, except power) into
    # per-name global nodes
    spines = {f'SPINE{i}' for i in range(32)}
    for row, rd in enumerate(dev.grid):
        for col, rc in enumerate(rd):
            for dest, srcs in rc.pure_clock_pips.items():
                for src in srcs.keys():
                    if src in spines and not dest.startswith('GT'):
                        dev.nodes.setdefault(src, ("GLOBAL_CLK", set()))[1].add((row, col, src))
                if dest in spines:
                    dev.nodes.setdefault(dest, ("GLOBAL_CLK", set()))[1].add((row, col, dest))
                    for src in { wire for wire in srcs.keys() if wire not in {'VCC', 'VSS'}}:
                        dev.nodes.setdefault(src, ("GLOBAL_CLK", set()))[1].add((row, col, src))
    # GBx0 <- GBOx
    # compute which tap column serves which columns (see the tap_start
    # explanation above _clock_data)
    for spine_pair in range(4): # GB00/GB40, GB10/GB50, GB20/GB60, GB30/GB70
        tap_start = _clock_data[device]['tap_start'][0]
        tap_col = tap_start[spine_pair]
        last_col = center_col
        for col in range(dev.cols):
            if col == center_col + 1:
                # crossing into the right quadrant: restart the 4-column cycle
                tap_start = _clock_data[device]['tap_start'][1]
                tap_col = tap_start[spine_pair] + col
                last_col = dev.cols - 1
            if (col > tap_col + 2) and (tap_col + 4 < last_col):
                tap_col += 4
            taps.setdefault(spine_pair, {}).setdefault(tap_col, set()).add(col)
    for row in range(dev.rows):
        for spine_pair, tap_desc in taps.items():
            for tap_col, cols in tap_desc.items():
                node0_name = f'X{tap_col}Y{row}/GBO0'
                dev.nodes.setdefault(node0_name, ("GLOBAL_CLK", set()))[1].add((row, tap_col, 'GBO0'))
                node1_name = f'X{tap_col}Y{row}/GBO1'
                dev.nodes.setdefault(node1_name, ("GLOBAL_CLK", set()))[1].add((row, tap_col, 'GBO1'))
                for col in cols:
                    dev.nodes.setdefault(node0_name, ("GLOBAL_CLK", set()))[1].add((row, col, f'GB{spine_pair}0'))
                    dev.nodes.setdefault(node1_name, ("GLOBAL_CLK", set()))[1].add((row, col, f'GB{spine_pair + 4}0'))
    # GTx0 <- center row GTx0
    for spine_row, start_row, end_row, qno_l, qno_r in _clock_data[device]['quads']:
        for spine_pair, tap_desc in taps.items():
            for tap_col, cols in tap_desc.items():
                # quadrant number depends on which side of the center the tap is
                if tap_col < center_col:
                    quad = qno_l
                else:
                    quad = qno_r
                for col in cols - {center_col}:
                    node0_name = f'X{col}Y{spine_row}/GT00'
                    dev.nodes.setdefault(node0_name, ("GLOBAL_CLK", set()))[1].add((spine_row, col, 'GT00'))
                    node1_name = f'X{col}Y{spine_row}/GT10'
                    dev.nodes.setdefault(node1_name, ("GLOBAL_CLK", set()))[1].add((spine_row, col, 'GT10'))
                    for row in range(start_row, end_row):
                        if row == spine_row:
                            if col == tap_col:
                                spine = quad * 8 + spine_pair
                                dev.nodes.setdefault(f'SPINE{spine}', ("GLOBAL_CLK", set()))[1].add((row, col, f'SPINE{spine}'))
                                # XXX skip clock 6 and 7 for now
                                if spine_pair not in {2, 3}:
                                    dev.nodes.setdefault(f'SPINE{spine + 4}', ("GLOBAL_CLK", set()))[1].add((row, col, f'SPINE{spine + 4}'))
                        else:
                            dev.nodes.setdefault(node0_name, ("GLOBAL_CLK", set()))[1].add((row, col, 'GT00'))
                            dev.nodes.setdefault(node1_name, ("GLOBAL_CLK", set()))[1].add((row, col, 'GT10'))
# These features of IO on the underside of the chip were revealed during
# operation. The first (normal) mode was found in a report by @LoneTech on
# 4/1/2022, when it turned out that the pins on the bottom edge of the GW1NR-9
# require voltages to be applied to strange wires to function.
# The second mode was discovered when the IOLOGIC implementation appeared and
# it turned out that even ODDR does not work without applying other voltages.
# Other applications of these wires are not yet known.
# function 0 - usual io
# function 1 - DDR
def fse_create_bottom_io(dev, device):
    """Record the special wires of the bottom-edge IO.

    dev.bottom_io = (wire_a, wire_b, [(va, vb) per function]), where the
    voltage pairs correspond to function 0 (usual io) and function 1 (DDR)
    described in the comment above this function.
    """
    if device in {'GW1NS-4', 'GW1N-9C'}:
        special_wires = ('D6', 'C6')
    elif device == 'GW1N-9':
        special_wires = ('A6', 'CE2')
    else:
        dev.bottom_io = ('', '', [])
        return
    dev.bottom_io = (*special_wires, [('VSS', 'VSS'), ('VCC', 'VSS')])
# It was noticed that the "simplified" IO line matched the BRAM line, whose
# position can be found from dat['grid']. Later this turned out to be not very
# true - for chips other than GW1N-1 IO in these lines may be with reduced
# functionality, or may be normal. It may be worth renaming these lines to
# BRAM-rows, but for now this is an acceptable mechanism for finding
# non-standard IOs, taking into account the chip series, eliminating the
# "magic" coordinates.
def fse_create_simplio_rows(dev, dat):
    """Collect the rows that may host "simplified" IO (the B/b rows of dat['grid'])."""
    for row, line in enumerate(dat['grid']):
        if not any(ch in "Bb" for ch in line):
            continue
        # map the dat['grid'] coordinate onto the core grid
        if row > 0:
            row -= 1
        if row == dev.rows:
            row -= 1
        dev.simplio_rows.add(row)
def fse_create_tile_types(dev, dat):
    """Record, per special letter in dat['grid'] ('P', 'C', 'M', 'I'),
    the set of tile types of the corresponding cells in dev.grid."""
    def clamp(coord, limit):
        # dat['grid'] has one extra line/column on each edge; fold the
        # coordinate back onto the core grid
        if coord > 0:
            coord -= 1
        if coord == limit:
            coord -= 1
        return coord

    type_chars = 'PCMI'
    for ch in type_chars:
        dev.tile_types[ch] = set()
    for row, line in enumerate(dat['grid']):
        for col, ch in enumerate(line):
            if ch in type_chars:
                tile = dev.grid[clamp(row, dev.rows)][clamp(col, dev.cols)]
                dev.tile_types[ch].add(tile.ttyp)
def fse_create_diff_types(dev, device):
    """Set dev.diff_io_types: the differential IO buffer primitives the device supports."""
    all_types = ['ELVDS_IBUF', 'ELVDS_OBUF', 'ELVDS_IOBUF', 'ELVDS_TBUF',
                 'TLVDS_IBUF', 'TLVDS_OBUF', 'TLVDS_IOBUF', 'TLVDS_TBUF']
    if device == 'GW1NZ-1':
        unsupported = {'TLVDS_IBUF', 'TLVDS_OBUF', 'TLVDS_TBUF', 'TLVDS_IOBUF',
                       'ELVDS_IOBUF'}
    elif device == 'GW1N-1':
        unsupported = {'TLVDS_OBUF', 'TLVDS_TBUF', 'TLVDS_IOBUF', 'ELVDS_IOBUF'}
    elif device in {'GW2A-18', 'GW2A-18C', 'GW1N-4'}:
        unsupported = set()
    else:
        # only the three devices above have true-LVDS IOBUF
        unsupported = {'TLVDS_IOBUF'}
    dev.diff_io_types = [t for t in all_types if t not in unsupported]
def fse_create_io16(dev, device):
    """Mark cells that can host 16-bit SerDes (OSER16/IDES16) primitives.

    These primitives occupy two consecutive cells: on the top/bottom edges
    the MAIN cell is in the lower-numbered column, on the sides - in the
    lower-numbered row.  The valid column/row ranges below were obtained by
    placing OSER16 one position at a time in the vendor IDE and noting where
    it compiles; only GW1N-9/9C and GW1NS-4 have these primitives at all.
    'pair' is the (row, col) offset from this cell to its partner.
    """
    df = dev.extra_func

    def mark(loc, role, pair):
        df.setdefault(loc, {})['io16'] = {'role': role, 'pair': pair}

    if device in {'GW1N-9', 'GW1N-9C'}:
        for i in chain(range(1, 8, 2), range(10, 17, 2), range(20, 35, 2), range(38, 45, 2)):
            for row in (0, dev.rows - 1):
                mark((row, i), 'MAIN', (0, 1))
                mark((row, i + 1), 'AUX', (0, -1))
    elif device == 'GW1NS-4':
        for i in chain(range(1, 8, 2), range(10, 17, 2), range(20, 26, 2), range(28, 35, 2)):
            mark((0, i), 'MAIN', (0, 1))
            mark((0, i + 1), 'AUX', (0, -1))
            if i < 17:
                # right edge: MAIN/AUX pairs stacked vertically
                mark((i, dev.cols - 1), 'MAIN', (1, 0))
                mark((i + 1, dev.cols - 1), 'AUX', (-1, 0))
# Oscillator port wiring per (osc primitive type, device):
# { (osc-type, device) : ({local-port: wire}, {port: (row, col, wire) alias}) }
# local-ports are wires inside the oscillator's own cell; aliases point at
# wires of other cells that act as ports of the primitive.
_osc_ports = {('OSCZ', 'GW1NZ-1'): ({}, {'OSCOUT' : (0, 5, 'OF3'), 'OSCEN': (0, 2, 'A6')}),
              ('OSCZ', 'GW1NS-4'): ({'OSCOUT': 'Q4', 'OSCEN': 'D6'}, {}),
              ('OSCF', 'GW1NS-2'): ({}, {'OSCOUT': (10, 19, 'Q4'), 'OSCEN': (13, 19, 'B3')}),
              ('OSCH', 'GW1N-1'): ({'OSCOUT': 'Q4'}, {}),
              ('OSC', 'GW1N-4'): ({'OSCOUT': 'Q4'}, {}),
              ('OSC', 'GW1N-9'): ({'OSCOUT': 'Q4'}, {}),
              ('OSC', 'GW1N-9C'): ({'OSCOUT': 'Q4'}, {}),
              ('OSC', 'GW2A-18'): ({'OSCOUT': 'Q4'}, {}),
              ('OSC', 'GW2A-18C'): ({'OSCOUT': 'Q4'}, {}),
              # XXX unsupported boards, pure theorizing
              ('OSCO', 'GW1N-2'): ({'OSCOUT': 'Q7'}, {'OSCEN': (9, 1, 'B4')}),
              ('OSCW', 'GW2AN-18'): ({'OSCOUT': 'Q4'}, {}),
              }
def fse_create_osc(dev, device, fse):
    """Register oscillator bels: every tile type with shortval table 51 hosts one.

    Stores the oscillator type in dev.extra_func and creates Himbaechel
    nodes for ports that are aliased into other cells (see _osc_ports).
    """
    for row, grid_row in enumerate(dev.grid):
        for col, cell in enumerate(grid_row):
            if 51 not in fse[cell.ttyp]['shortval']:
                continue
            osc_type = [*fse_osc(device, fse, cell.ttyp)][0]
            dev.extra_func.setdefault((row, col), {}).update(
                {'osc': {'type': osc_type}})
            _, aliases = _osc_ports[osc_type, device]
            for port, alias in aliases.items():
                node = dev.nodes.setdefault(f'X{col}Y{row}/{port}', (port, {(row, col, port)}))
                node[1].add(alias)
def fse_create_gsr(dev, device):
    """Record the cell and wire of the GSR (global set/reset) primitive.

    Several cells have a ['shortval'][20] table; the one actually wired to
    GSR was found experimentally by connecting the GSRI input of a test GSR
    primitive to a button and observing which cell's routing changed.
    """
    gsr_loc = (27, 50) if device in {'GW2A-18', 'GW2A-18C'} else (0, 0)
    dev.extra_func.setdefault(gsr_loc, {}).update({'gsr': {'wire': 'C4'}})
def disable_plls(dev, device):
    """Mark cells where a PLL must not be placed.

    (9, 0) and (9, 55) are the GW2A-18C cells where the vendor IDE errors
    out when a PLL is placed there.
    """
    if device == 'GW2A-18C':
        for loc in ((9, 0), (9, 55)):
            dev.extra_func.setdefault(loc, {}).setdefault('disabled', {})['PLL'] = True
def sync_extra_func(dev):
    """Mirror the per-cell HCLK pips into dev.extra_func."""
    for (row, col), pips in dev.hclk_pips.items():
        dev.extra_func.setdefault((row, col), {})['hclk_pips'] = pips
def from_fse(device, fse, dat):
    """Build a Device from the vendor 'fse' and 'dat' file contents.

    Creates one Tile prototype per tile type found in the floorplan
    (fse['header']['grid'][61]), fills its pips and bels, then runs the
    fse_create_* passes that add clock, IO, oscillator, GSR and PLL
    metadata.  Returns the populated Device.
    """
    dev = Device()
    fse_create_simplio_rows(dev, dat)
    ttypes = {t for row in fse['header']['grid'][61] for t in row}
    tiles = {}
    for ttyp in ttypes:
        w = fse[ttyp]['width']
        h = fse[ttyp]['height']
        tile = Tile(w, h, ttyp)
        tile.pips = fse_pips(fse, ttyp, 2, wirenames)
        tile.clock_pips = fse_pips(fse, ttyp, 38, clknames)
        # copy for Himbaechel without hclk
        tile.pure_clock_pips = copy.deepcopy(tile.clock_pips)
        tile.clock_pips.update(fse_hclk_pips(fse, ttyp, tile.aliases))
        tile.alonenode_6 = fse_alonenode(fse, ttyp, 6)
        # shortval table 5 marks tiles with LUTs, table 51 — tiles with an oscillator
        if 5 in fse[ttyp]['shortval']:
            tile.bels = fse_luts(fse, ttyp)
        if 51 in fse[ttyp]['shortval']:
            tile.bels = fse_osc(device, fse, ttyp)
        # These are the cell types in which PLLs can be located. To determine,
        # we first take the coordinates of the cells with the letters P and p
        # from the dat['grid'] table, and then, using these coordinates,
        # determine the type from fse['header']['grid'][61][row][col]
        if ttyp in [42, 45, 74, 75, 76, 77, 78, 79, 86, 87, 88, 89]:
            tile.bels = fse_pll(device, fse, ttyp)
        tile.bels.update(fse_iologic(device, fse, ttyp))
        tiles[ttyp] = tile
    fse_fill_logic_tables(dev, fse)
    dev.grid = [[tiles[ttyp] for ttyp in row] for row in fse['header']['grid'][61]]
    # per-device metadata passes; order matters for some of them (e.g.
    # fse_create_bottom_io before dat_portmap uses dev.bottom_io)
    fse_create_clocks(dev, device, dat, fse)
    fse_create_pll_clock_aliases(dev, device)
    fse_create_hclk_aliases(dev, device, dat)
    fse_create_bottom_io(dev, device)
    fse_create_tile_types(dev, dat)
    fse_create_diff_types(dev, device)
    fse_create_hclk_nodes(dev, device, fse, dat)
    fse_create_io16(dev, device)
    fse_create_osc(dev, device, fse)
    fse_create_gsr(dev, device)
    disable_plls(dev, device)
    sync_extra_func(dev)
    return dev
# get fuses for attr/val set using short/longval table
# returns a bit set
def get_table_fuses(attrs, table):
    """Return the union of fuse bits of every table record whose key matches 'attrs'.

    A table key is a fixed-length, zero-padded tuple of "feature" indices:
    0 terminates the key, a positive index must be present in 'attrs', and a
    negative index marks a feature that is set by default and must therefore
    be absent from 'attrs'.
    """
    def key_matches(key):
        for feature in key:
            if feature == 0:
                # zero-padding: the rest of the key is empty
                return True
            if feature > 0:
                if feature not in attrs:
                    return False
            elif -feature in attrs:
                # default-set feature was explicitly unset
                return False
        return True

    bits = set()
    for key, fuses in table.items():
        if key_matches(key):
            bits.update(fuses)
    return bits
# get fuses for attr/val set using shortval table for ttyp
# returns a bit set
def get_shortval_fuses(dev, ttyp, attrs, table_name):
    """Return the fuse bits for 'attrs' from the shortval table 'table_name' of tile type 'ttyp'."""
    return get_table_fuses(attrs, dev.shortval[ttyp][table_name])
# get fuses for attr/val set using longval table for ttyp
# returns a bit set
def get_longval_fuses(dev, ttyp, attrs, table_name):
    """Return the fuse bits for 'attrs' from the longval table 'table_name' of tile type 'ttyp'."""
    return get_table_fuses(attrs, dev.longval[ttyp][table_name])
# get bank fuses
# The table for banks is different in that the first element in it is the
# number of the bank, thus allowing the repetition of elements in the key
def get_bank_fuses(dev, ttyp, attrs, table_name, bank_num):
    """Return the fuse bits for 'attrs' for bank 'bank_num'.

    Bank longval keys are (bank, *features); filter by the bank number and
    strip it before running the generic key matching.
    """
    return get_table_fuses(attrs, {k[1:]:val for k, val in dev.longval[ttyp][table_name].items() if k[0] == bank_num})
# add the attribute/value pair into an set, which is then passed to
# get_longval_fuses() and get_shortval_fuses()
def add_attr_val(dev, logic_table, attrs, attr, val):
    """Translate (attr, val) to its index in dev.logicinfo[logic_table] and
    add that index to 'attrs'.  Unknown pairs are silently ignored."""
    for idx, rec in enumerate(dev.logicinfo[logic_table]):
        if rec[0] == attr and rec[1] == val:
            attrs.add(idx)
            break
def get_pins(device):
    """Collect pin data for every package of 'device' via the pindef module.

    Returns (pkgs, res, res_bank_pins):
      pkgs          — pindef.all_packages() result ({partnumber: (pkg, ...)}),
      res           — {package: pin locations} (each distinct package once),
      res_bank_pins — merged bank->pins mapping over all packages.
    Raises Exception for devices not in the supported set.
    """
    if device not in {"GW1N-1", "GW1NZ-1", "GW1N-4", "GW1N-9", "GW1NR-9", "GW1N-9C", "GW1NR-9C", "GW1NS-2", "GW1NS-2C", "GW1NS-4", "GW1NSR-4C", "GW2A-18", "GW2A-18C", "GW2AR-18C"}:
        raise Exception(f"unsupported device {device}")
    pkgs = pindef.all_packages(device)
    res = {}
    res_bank_pins = {}
    for pkg_rec in pkgs.values():
        pkg = pkg_rec[0]
        # several partnumbers may share one package — process it once
        if pkg in res:
            continue
        res[pkg] = pindef.get_pin_locs(device, pkg, pindef.VeryTrue)
        res_bank_pins.update(pindef.get_bank_pins(device, pkg))
    return (pkgs, res, res_bank_pins)
# returns ({partnumber: (package, device, speed)}, {pins}, {bank_pins})
def json_pinout(device):
    """Collect the pinout of 'device' and all of its package variants.

    Returns (pkgs, pins, bank_pins):
      pkgs      — merged partnumber table of all variants,
      pins      — {variant device name: per-package pin locations},
      bank_pins — merged bank->pins mapping of all variants.
    Raises Exception for unknown devices.

    The previous implementation repeated the same merge code once per
    device; the variant lists below preserve the original entry order of
    the returned dicts.
    """
    variants = {
        "GW1N-1": ["GW1N-1"],
        "GW1NZ-1": ["GW1NZ-1"],
        "GW1N-4": ["GW1N-4"],
        "GW1NS-4": ["GW1NS-4", "GW1NSR-4C"],
        "GW1N-9": ["GW1N-9", "GW1NR-9"],
        "GW1N-9C": ["GW1N-9C", "GW1NR-9C"],
        "GW1NS-2": ["GW1NS-2", "GW1NS-2C"],
        "GW2A-18": ["GW2A-18"],
        "GW2A-18C": ["GW2A-18C", "GW2AR-18C"],
    }
    if device not in variants:
        raise Exception("unsupported device")
    res_pkgs = {}
    res_pins = {}
    res_bank_pins = {}
    for dev_name in variants[device]:
        pkgs, pins, bank_pins = get_pins(dev_name)
        res_pkgs.update(pkgs)
        res_pins[dev_name] = pins
        res_bank_pins.update(bank_pins)
    return (res_pkgs, res_pins, res_bank_pins)
# (index into dat['PllIn'] / dat['PllInDlt'], PLL port name) — used by dat_portmap()
_pll_inputs = [(5, 'CLKFB'), (6, 'FBDSEL0'), (7, 'FBDSEL1'), (8, 'FBDSEL2'), (9, 'FBDSEL3'),
               (10, 'FBDSEL4'), (11, 'FBDSEL5'),
               (12, 'IDSEL0'), (13, 'IDSEL1'), (14, 'IDSEL2'), (15, 'IDSEL3'), (16, 'IDSEL4'),
               (17, 'IDSEL5'),
               (18, 'ODSEL0'), (19, 'ODSEL1'), (20, 'ODSEL2'), (21, 'ODSEL3'), (22, 'ODSEL4'),
               (23, 'ODSEL5'), (0, 'RESET'), (1, 'RESET_P'),
               (24, 'PSDA0'), (25, 'PSDA1'), (26, 'PSDA2'), (27, 'PSDA3'),
               (28, 'DUTYDA0'), (29, 'DUTYDA1'), (30, 'DUTYDA2'), (31, 'DUTYDA3'),
               (32, 'FDLY0'), (33, 'FDLY1'), (34, 'FDLY2'), (35, 'FDLY3')]
# (index into dat['PllOut'] / dat['PllOutDlt'], PLL port name)
_pll_outputs = [(0, 'CLKOUT'), (1, 'LOCK'), (2, 'CLKOUTP'), (3, 'CLKOUTD'), (4, 'CLKOUTD3')]
# (index into dat['Iologic?In'], IOLOGIC port name)
_iologic_inputs =  [(0, 'D'), (1, 'D0'), (2, 'D1'), (3, 'D2'), (4, 'D3'), (5, 'D4'),
                    (6, 'D5'), (7, 'D6'), (8, 'D7'), (9, 'D8'), (10, 'D9'), (11, 'D10'),
                    (12, 'D11'), (13, 'D12'), (14, 'D13'), (15, 'D14'), (16, 'D15'),
                    (17, 'CLK'), (18, 'ICLK'), (19, 'PCLK'), (20, 'FCLK'), (21, 'TCLK'),
                    (22, 'MCLK'), (23, 'SET'), (24, 'RESET'), (25, 'PRESET'), (26, 'CLEAR'),
                    (27, 'TX'), (28, 'TX0'), (29, 'TX1'), (30, 'TX2'), (31, 'TX3'),
                    (32, 'WADDR0'), (33, 'WADDR1'), (34, 'WADDR2'), (35, 'RADDR0'),
                    (36, 'RADDR1'), (37, 'RADDR2'), (38, 'CALIB'), (39, 'DI'), (40, 'SETN'),
                    (41, 'SDTAP'), (42, 'VALUE'), (43, 'DASEL'), (44, 'DASEL0'), (45, 'DASEL1'),
                    (46, 'DAADJ'), (47, 'DAADJ0'), (48, 'DAADJ1')]
# (index into dat['Iologic?Out'], IOLOGIC port name)
_iologic_outputs = [(0, 'Q'), (1, 'Q0'), (2, 'Q1'), (3, 'Q2'), (4, 'Q3'), (5, 'Q4'),
                    (6, 'Q5'), (7, 'Q6'), (8, 'Q7'), (9, 'Q8'), (10, 'Q9'), (11, 'Q10'),
                    (12, 'Q11'), (13, 'Q12'), (14, 'Q13'), (15, 'Q14'), (16, 'Q15'),
                    (17, 'DO'), (18, 'DF'), (19, 'LAG'), (20, 'LEAD'), (21, 'DAO')]
# OSER16/IDES16 use cell A's Iologic tables for these ports ...
_oser16_inputs = [(19, 'PCLK'), (20, 'FCLK'), (25, 'RESET')]
# ... and fixed wires for the data ports
_oser16_fixed_inputs = {'D0': 'A0', 'D1': 'A1', 'D2': 'A2', 'D3': 'A3', 'D4': 'C1',
                        'D5': 'C0', 'D6': 'D1', 'D7': 'D0', 'D8': 'C3', 'D9': 'C2',
                        'D10': 'B4', 'D11': 'B5', 'D12': 'A0', 'D13': 'A1', 'D14': 'A2',
                        'D15': 'A3'}
_oser16_outputs = [(1, 'Q0')]
_ides16_inputs = [(19, 'PCLK'), (20, 'FCLK'), (38, 'CALIB'), (25, 'RESET'), (0, 'D')]
_ides16_fixed_outputs = { 'Q0': 'F2', 'Q1': 'F3', 'Q2': 'F4', 'Q3': 'F5', 'Q4': 'Q0',
                          'Q5': 'Q1', 'Q6': 'Q2', 'Q7': 'Q3', 'Q8': 'Q4', 'Q9': 'Q5', 'Q10': 'F0',
                          'Q11': 'F1', 'Q12': 'F2', 'Q13': 'F3', 'Q14': 'F4', 'Q15': 'F5'}
def get_pllout_global_name(row, col, wire, device):
    """Return the global clock name of the PLL output located at (row, col, wire).

    Reverse lookup in _pll_loc; raises Exception for an unknown location.
    """
    target = (row, col, wire)
    for global_name, loc in _pll_loc[device].items():
        if loc == target:
            return global_name
    raise Exception(f"bad PLL output {device} ({row}, {col}){wire}")
def dat_portmap(dat, dev, device):
    """Fill bel.portmap for every bel in the grid from the vendor 'dat' tables.

    Handles IOB, IOLOGIC, OSER16/IDES16, rPLL/PLLVR and OSC bels.  Ports
    that physically live in another cell get an alias in dev.aliases and a
    Himbaechel node in dev.nodes instead of a plain wire name.
    """
    for row, row_dat in enumerate(dev.grid):
        for col, tile in enumerate(row_dat):
            for name, bel in tile.bels.items():
                if bel.portmap:
                    # portmap already filled for this (shared) bel prototype;
                    # only the GW2A mirrored RPLLA needs reprocessing per cell
                    # GW2A has same PLL in different rows
                    if not (name.startswith("RPLLA") and device in {'GW2A-18', 'GW2A-18C'}):
                        continue
                if name.startswith("IOB"):
                    if row in dev.simplio_rows:
                        # "simplified" IO rows use indexed tables
                        idx = ord(name[-1]) - ord('A')
                        inp = wirenames[dat['IobufIns'][idx]]
                        bel.portmap['I'] = inp
                        out = wirenames[dat['IobufOuts'][idx]]
                        bel.portmap['O'] = out
                        oe = wirenames[dat['IobufOes'][idx]]
                        bel.portmap['OE'] = oe
                    else:
                        pin = name[-1]
                        inp = wirenames[dat[f'Iobuf{pin}Out']]
                        bel.portmap['O'] = inp
                        out = wirenames[dat[f'Iobuf{pin}In']]
                        bel.portmap['I'] = out
                        oe = wirenames[dat[f'Iobuf{pin}OE']]
                        bel.portmap['OE'] = oe
                    if row == dev.rows - 1:
                        # bottom io: extra wires set up by fse_create_bottom_io()
                        bel.portmap['BOTTOM_IO_PORT_A'] = dev.bottom_io[0]
                        bel.portmap['BOTTOM_IO_PORT_B'] = dev.bottom_io[1]
                elif name.startswith("IOLOGIC"):
                    buf = name[-1]
                    for idx, nam in _iologic_inputs:
                        w_idx = dat[f'Iologic{buf}In'][idx]
                        if w_idx >= 0:
                            bel.portmap[nam] = wirenames[w_idx]
                        elif nam == 'FCLK':
                            # dummy Input, we'll make a special pips for it
                            bel.portmap[nam] = "FCLK"
                    for idx, nam in _iologic_outputs:
                        w_idx = dat[f'Iologic{buf}Out'][idx]
                        if w_idx >= 0:
                            bel.portmap[nam] = wirenames[w_idx]
                elif name.startswith("OSER16"):
                    # 16-bit serializer: control ports from cell A's tables,
                    # data ports are fixed wires (_oser16_fixed_inputs)
                    for idx, nam in _oser16_inputs:
                        w_idx = dat[f'IologicAIn'][idx]
                        if w_idx >= 0:
                            bel.portmap[nam] = wirenames[w_idx]
                        elif nam == 'FCLK':
                            # dummy Input, we'll make a special pips for it
                            bel.portmap[nam] = "FCLK"
                    for idx, nam in _oser16_outputs:
                        w_idx = dat[f'IologicAOut'][idx]
                        if w_idx >= 0:
                            bel.portmap[nam] = wirenames[w_idx]
                    bel.portmap.update(_oser16_fixed_inputs)
                elif name.startswith("IDES16"):
                    for idx, nam in _ides16_inputs:
                        w_idx = dat[f'IologicAIn'][idx]
                        if w_idx >= 0:
                            bel.portmap[nam] = wirenames[w_idx]
                        elif nam == 'FCLK':
                            # dummy Input, we'll make a special pips for it
                            bel.portmap[nam] = "FCLK"
                    bel.portmap.update(_ides16_fixed_outputs)
                elif name == 'RPLLA':
                    # The PllInDlt table seems to indicate in which cell the
                    # inputs are actually located.
                    offx = 1
                    if device in {'GW1N-9C', 'GW1N-9', 'GW2A-18', 'GW2A-18C'}:
                        # two mirrored PLLs
                        if col > dat['center'][1] - 1:
                            offx = -1
                    for idx, nam in _pll_inputs:
                        wire = wirenames[dat['PllIn'][idx]]
                        off = dat['PllInDlt'][idx] * offx
                        if device in {'GW1NS-2'}:
                            # NS-2 is a strange thingy
                            if nam in {'RESET', 'RESET_P', 'IDSEL1', 'IDSEL2', 'ODSEL5'}:
                                bel.portmap[nam] = f'rPLL{nam}{wire}'
                                dev.aliases[row, col, f'rPLL{nam}{wire}'] = (9, col, wire)
                            else:
                                bel.portmap[nam] = wire
                        elif off == 0:
                            bel.portmap[nam] = wire
                        else:
                            # not our cell, make an alias
                            bel.portmap[nam] = f'rPLL{nam}{wire}'
                            dev.aliases[row, col, f'rPLL{nam}{wire}'] = (row, col + off, wire)
                            # Himbaechel node
                            dev.nodes.setdefault(f'X{col}Y{row}/rPLL{nam}{wire}', ("PLL_I", {(row, col, f'rPLL{nam}{wire}')}))[1].add((row, col + off, wire))
                    for idx, nam in _pll_outputs:
                        wire = wirenames[dat['PllOut'][idx]]
                        off = dat['PllOutDlt'][idx] * offx
                        if off == 0 or device in {'GW1NS-2'}:
                            bel.portmap[nam] = wire
                        else:
                            # not our cell, make an alias
                            bel.portmap[nam] = f'rPLL{nam}{wire}'
                            dev.aliases[row, col, f'rPLL{nam}{wire}'] = (row, col + off, wire)
                        # Himbaechel node
                        if nam != 'LOCK':
                            global_name = get_pllout_global_name(row, col + off, wire, device)
                        else:
                            global_name = f'X{col}Y{row}/rPLL{nam}{wire}'
                        dev.nodes.setdefault(global_name, ("PLL_O", set()))[1].update({(row, col, f'rPLL{nam}{wire}'), (row, col + off, wire)})
                    # clock input
                    nam = 'CLKIN'
                    wire = wirenames[dat['PllClkin'][1][0]]
                    off = dat['PllClkin'][1][1] * offx
                    if off == 0:
                        bel.portmap[nam] = wire
                    else:
                        # not our cell, make an alias
                        bel.portmap[nam] = f'rPLL{nam}{wire}'
                        dev.aliases[row, col, f'rPLL{nam}{wire}'] = (row, col + off, wire)
                        # Himbaechel node
                        dev.nodes.setdefault(f'X{col}Y{row}/rPLL{nam}{wire}', ("PLL_I", {(row, col, f'rPLL{nam}{wire}')}))[1].add((row, col + off, wire))
                elif name == 'PLLVR':
                    # two PLLVR instances; which one is selected by the column
                    pll_idx = 0
                    if col != 27:
                        pll_idx = 1
                    for idx, nam in _pll_inputs:
                        pin_row = dat[f'SpecPll{pll_idx}Ins'][idx * 3 + 0]
                        wire = wirenames[dat[f'SpecPll{pll_idx}Ins'][idx * 3 + 2]]
                        if pin_row == 1:
                            bel.portmap[nam] = wire
                        else:
                            # some of the PLLVR inputs are in a special cell
                            # (9, 37), here we create aliases where the
                            # destination is the ports of the primitive, but
                            # you should keep in mind that nextpnr is designed
                            # so that it will not use such aliases. They have
                            # to be taken care of separately.
                            bel.portmap[nam] = f'PLLVR{nam}{wire}'
                            dev.aliases[row, col, f'PLLVR{nam}{wire}'] = (9, 37, wire)
                            # Himbaechel node
                            dev.nodes.setdefault(f'X{col}Y{row}/PLLVR{nam}{wire}', ("PLL_I", {(row, col, f'PLLVR{nam}{wire}')}))[1].add((9, 37, wire))
                    for idx, nam in _pll_outputs:
                        wire = wirenames[dat[f'SpecPll{pll_idx}Outs'][idx * 3 + 2]]
                        bel.portmap[nam] = wire
                        # Himbaechel node
                        if nam != 'LOCK':
                            global_name = get_pllout_global_name(row, col, wire, device)
                        else:
                            global_name = f'X{col}Y{row}/PLLVR{nam}{wire}'
                        dev.nodes.setdefault(global_name, ("PLL_O", set()))[1].update({(row, col, f'PLLVR{nam}{wire}'), (row, col, wire)})
                    bel.portmap['CLKIN'] = wirenames[124];
                    # NOTE(review): 'reset' is computed but never used — verify
                    # whether a RESET port assignment is missing here
                    reset = wirenames[dat[f'SpecPll{pll_idx}Ins'][0 + 2]]
                    # VREN pin is placed in another cell
                    if pll_idx == 0:
                        vren = 'D0'
                    else:
                        vren = 'B0'
                    bel.portmap['VREN'] = f'PLLVRV{vren}'
                    dev.aliases[row, col, f'PLLVRV{vren}'] = (0, 37, vren)
                    # Himbaechel node
                    dev.nodes.setdefault(f'X{col}Y{row}/PLLVRV{vren}', ("PLL_I", {(row, col, f'PLLVRV{vren}')}))[1].add((0, 37, vren))
                # plain 'if' (not elif): OSC names never match the branches
                # above, so this is equivalent to another elif
                if name.startswith('OSC'):
                    # local ports
                    local_ports, aliases = _osc_ports[name, device]
                    bel.portmap.update(local_ports)
                    for port, alias in aliases.items():
                        bel.portmap[port] = port
                        dev.aliases[row, col, port] = alias
def dat_aliases(dat, dev):
    """Add the X11 wire aliases from 'dat' (dest <- src) to every tile of the grid."""
    # resolve the wire names once instead of per tile
    pairs = [(wirenames[dst], wirenames[src])
             for dst, (src,) in zip(dat['X11s'], dat['X11Ins'])]
    for grid_row in dev.grid:
        for tile in grid_row:
            for dst, src in pairs:
                tile.aliases[dst] = src
def tile_bitmap(dev, bitmap, empty=False):
    """Cut the whole-chip fuse bitmap into per-tile views.

    Returns {(row, col): 2D sub-array}; all-zero tiles are omitted unless
    'empty' is True.  The sub-arrays are views into 'bitmap'.
    """
    res = {}
    y = 0
    for row_idx, grid_row in enumerate(dev.grid):
        x = 0
        h = 0
        for col_idx, td in enumerate(grid_row):
            w, h = td.width, td.height
            view = bitmap[y:y + h, x:x + w]
            if empty or view.any():
                res[(row_idx, col_idx)] = view
            x += w
        y += h
    return res
def fuse_bitmap(db, bitmap):
    """Inverse of tile_bitmap(): assemble per-tile bitmaps back into one
    (db.height, db.width) uint8 array."""
    res = np.zeros((db.height, db.width), dtype=np.uint8)
    y = 0
    for row_idx, grid_row in enumerate(db.grid):
        x = 0
        h = 0
        for col_idx, td in enumerate(grid_row):
            w, h = td.width, td.height
            res[y:y + h, x:x + w] = bitmap[(row_idx, col_idx)]
            x += w
        y += h
    return res
def shared2flag(dev):
    "Convert mode bits that are shared between bels to flags"
    # For every pair of bels in a tile whose mode bit sets overlap (but are
    # not identical), the common bits are moved out of each mode's bit set
    # into a '<mode>C' flag on the respective bel.
    for idx, row in enumerate(dev.grid):
        for jdx, td in enumerate(row):
            for namea, bela in td.bels.items():
                bitsa = bela.mode_bits
                for nameb, belb in td.bels.items():
                    bitsb = belb.mode_bits
                    common_bits = bitsa & bitsb
                    if bitsa != bitsb and common_bits:
                        print(idx, jdx, namea, "and", nameb, "have common bits:", common_bits)
                        for mode, bits in bela.modes.items():
                            mode_cb = bits & common_bits
                            if mode_cb:
                                bela.flags[mode+"C"] = mode_cb
                                # in-place set subtraction: mutates the set stored in bela.modes
                                bits -= mode_cb
                        for mode, bits in belb.modes.items():
                            mode_cb = bits & common_bits
                            if mode_cb:
                                belb.flags[mode+"C"] = mode_cb
                                # likewise mutates belb.modes[mode] in place
                                bits -= mode_cb
def get_route_bits(db, row, col):
    """ All routing bits for the cell: union of the fuse bits of every pip
    and clock pip in db.grid[row][col]. """
    tile = db.grid[row][col]
    bits = set()
    for pip_table in (tile.pips, tile.clock_pips):
        for srcs in pip_table.values():
            for fuse_bits in srcs.values():
                bits.update(fuse_bits)
    return bits
# direction flip for wires that reflect off the chip edge
uturnlut = {'N': 'S', 'S': 'N', 'E': 'W', 'W': 'E'}
# (row, col) step towards the cell a wire segment originates from
dirlut = {'N': (1, 0),
          'E': (0, -1),
          'S': (-1, 0),
          'W': (0, 1)}
def wire2global(row, col, db, wire):
    """Return the globally unique name of 'wire' as seen from cell (row, col).

    Power nets stay as-is; intra-tile wires are qualified with the cell
    coordinates; inter-tile wires (direction + length + segment) are renamed
    to their origin cell, reflecting off the chip edges (1-based coords).
    """
    if wire in {'VCC', 'VSS'}:
        return wire
    m = re.match(r"([NESW])([128]\d)(\d)", wire)
    if not m:
        # not an inter-tile wire
        return f"R{row}C{col}_{wire}"
    direction, num, segment = m.groups()
    drow, dcol = dirlut[direction]
    rootrow = row + drow * int(segment)
    rootcol = col + dcol * int(segment)
    # wires wrap around the edges; each reflection also flips the direction
    if rootrow < 1:
        rootrow, direction = 1 - rootrow, uturnlut[direction]
    if rootcol < 1:
        rootcol, direction = 1 - rootcol, uturnlut[direction]
    if rootrow > db.rows:
        rootrow, direction = 2 * db.rows + 1 - rootrow, uturnlut[direction]
    if rootcol > db.cols:
        rootcol, direction = 2 * db.cols + 1 - rootcol, uturnlut[direction]
    return f"R{rootrow}C{rootcol}_{direction}{num}"
def loc2pin_name(db, row, col):
    """ returns name like "IOB3" without [A,B,C...]:
    T/B (top/bottom) pins are numbered by column, L/R (left/right) by row,
    both 1-based.
    """
    if row == 0:
        return f"IOT{col + 1}"
    if row == db.rows - 1:
        return f"IOB{col + 1}"
    if col == 0:
        return f"IOL{row + 1}"
    return f"IOR{row + 1}"
def loc2bank(db, row, col):
    """ returns bank index '0'...'n'

    Corner cells are looked up directly in db.corners; other IO locations
    are resolved through their pin name, preferring the 'A' buffer and
    falling back to 'B'.
    """
    bank = db.corners.get((row, col))
    # fix: identity comparison with None (was '== None')
    if bank is None:
        name = loc2pin_name(db, row, col)
        nameA = name + 'A'
        if nameA in db.pin_bank:
            bank = db.pin_bank[nameA]
        else:
            bank = db.pin_bank[name + 'B']
    return bank
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/chipdb.py
|
chipdb.py
|
import sys
import os
import re
import random
import numpy as np
from itertools import chain, count
import pickle
import gzip
import argparse
import importlib.resources
from contextlib import closing
from apycula import codegen
from apycula import chipdb
from apycula import attrids
from apycula.bslib import read_bitstream
from apycula.wirenames import wirenames
# module-global device and pinout names; set elsewhere before unpacking
# (used by get_pll_A() and parse_tile_())
_device = ""
_pinout = ""
# one representative package per device
_packages = {
    'GW1N-1' : 'LQFP144', 'GW1NZ-1' : 'QFN48', 'GW1N-4' : 'PBGA256', 'GW1N-9C' : 'UBGA332',
    'GW1N-9' : 'PBGA256', 'GW1NS-4' : 'QFN48', 'GW1NS-2' : 'LQFP144', 'GW2A-18': 'PBGA256',
    'GW2A-18C' : 'PBGA256S'
}
# bank iostandards
# XXX default io standard may be board-dependent!
_banks = {'0': "LVCMOS18", '1': "LVCMOS18", '2': "LVCMOS18", '3': "LVCMOS18"}
# bank fuse tables. They are created here from the standard 'longval' because for
# banks the key of these tables starts with the bank number and unpack is not
# called so often that one can make 'right' tables on the fly.
_bank_fuse_tables = {}
# for a given mode returns a mask of zero bits
def zero_bits(mode, all_modes):
    """Return the bits that must be 0 while 'mode' is active: every bit used
    by any other mode (or its flags) minus the bits 'mode' itself uses."""
    other_bits = set()
    for other_mode, rec in all_modes.items():
        if other_mode == mode:
            continue
        other_bits |= rec.decode_bits
        for flag in rec.flags.values():
            other_bits |= flag.mask
    own_bits = set(all_modes[mode].decode_bits)
    for flag in all_modes[mode].flags.values():
        own_bits |= flag.mask
    return other_bits - own_bits
# If the length of the bit pattern is equal, start the comparison with IOBUF
def _io_mode_sort_func(mode):
l = len(mode[1].decode_bits) * 10
if mode[0] == 'IOBUF':
l += 2
elif mode[0] == 'OBUF':
l += 1
return l
# reverse lookup of an attribute name by its numeric code
def get_attr_name(attrname_table, code):
    """Return the attribute name for 'code', or '' if the code is unknown."""
    return next((name for name, cod in attrname_table.items() if cod == code), '')
# fix names and types of the PLL attributes
# { internal_name: external_name }
_pll_attrs = {
        'IDIV' :            'IDIV_SEL',
        'IDIVSEL' :         'DYN_IDIV_SEL',
        'FDIV' :            'FBDIV_SEL',
        'FDIVSEL' :         'DYN_FBDIV_SEL',
        'ODIV' :            'ODIV_SEL',
        'ODIVSEL' :         'DYN_ODIV_SEL',
        'PHASE' :           'PSDA_SEL',
        'DUTY' :            'DUTYDA_SEL',
        'DPSEL' :           'DYN_DA_EN',
        'OPDLY' :           'CLKOUT_DLY_STEP',
        'OSDLY' :           'CLKOUTP_DLY_STEP',
        'SDIV' :            'DYN_SDIV_SEL',
        'CLKOUTDIVSEL' :    'CLKOUTD_SRC',
        'CLKOUTDIV3SEL' :   'CLKOUTD3_SRC',
        'BYPCK' :           'CLKOUT_BYPASS',
        'BYPCKPS' :         'CLKOUTP_BYPASS',
        'BYPCKDIV' :        'CLKOUTD_BYPASS',
        }
# value-name fixups applied after the reverse lookup in attrids.pll_attrvals
_pll_vals = {
        'DYN' :             'true',
        'CLKOUTPS' :        'CLKOUTP',
        'BYPASS' :          'true',
        }
def pll_attrs_refine(in_attrs):
    """Convert raw PLL attributes ({internal name: value code}) into the
    canonical 'ATTR=value' strings, renaming via _pll_attrs/_pll_vals and
    rescaling numeric values where needed."""
    res = set()
    for attr, val in in_attrs.items():
        #print(attr, val)
        if attr not in _pll_attrs.keys():
            # a few attributes keep their internal name; everything else
            # outside _pll_attrs is dropped
            if attr in ['INSEL', 'FBSEL', 'PWDEN', 'RSTEN', 'CLKOUTDIV3', 'CLKOUTPS']:
                res.add(f'{attr}="{[ name for name, vl in attrids.pll_attrvals.items() if vl == val ][0]}"')
            continue
        attr = _pll_attrs[attr]
        if attr in ['CLKOUTP_DLY_STEP', 'CLKOUT_DLY_STEP']:
            # raw value is in units of 1/50 step
            new_val = val / 50
        elif attr in ['PSDA_SEL', 'DUTYDA_SEL']:
            new_val = f'"{val:04b}"'
        elif attr in ['IDIV_SEL', 'FBDIV_SEL']:
            new_val = val - 1
        elif attr in ['DYN_SDIV_SEL', 'ODIV_SEL']:
            new_val = val
        else:
            attrvals = [ name for name, vl in attrids.pll_attrvals.items() if vl == val ]
            if not attrvals:
                raise Exception(f"PLL no {attr} = {val}")
            if attrvals[0] in _pll_vals.keys():
                new_val = _pll_vals[attrvals[0]]
            # NOTE(review): if attrvals[0] is NOT in _pll_vals, 'new_val'
            # here is stale (previous loop iteration) or unbound — verify
            # that every reachable value name is covered by _pll_vals
            new_val = f'"{new_val}"'
        res.add(f'{attr}={new_val}')
    return res
_osc_attrs = {
'MCLKCIB': 'FREQ_DIV',
'OSCREG': 'REGULATOR_EN'
}
def osc_attrs_refine(in_attrs):
res = set()
for attr, val in in_attrs.items():
if attr not in _osc_attrs.keys():
continue
attr = _osc_attrs[attr]
if attr == 'FREQ_DIV':
new_val = val
else:
attrvals = [ name for name, vl in osc_attrvals.items() if vl == val ]
if attrvals[0] in osc_attrvals.keys():
new_val = attrvals[0]
new_val = f'"{new_val}"'
res.add(f'{attr}={new_val}')
if 'MCLKCIB' not in in_attrs.keys() and 'MCLKCIB_EN' in in_attrs.keys():
res.add('FREQ_DIV=128')
return res
# {(REGSET, LSRONMUX, CLKMUX_CLK, SRMODE) : dff_type}
_dff_types = {
('RESET', '', 'SIG', '') : 'DFF',
('RESET', '', 'INV', '') : 'DFFN',
('RESET', 'LSRMUX', 'SIG', 'ASYNC') : 'DFFC',
('RESET', 'LSRMUX', 'INV', 'ASYNC') : 'DFFNC',
('RESET', 'LSRMUX', 'SIG', '') : 'DFFR',
('RESET', 'LSRMUX', 'INV', '') : 'DFFNR',
('SET', 'LSRMUX', 'SIG', 'ASYNC') : 'DFFP',
('SET', 'LSRMUX', 'INV', 'ASYNC') : 'DFFNP',
('SET', 'LSRMUX', 'SIG', '') : 'DFFS',
('SET', 'LSRMUX', 'INV', '') : 'DFFNS',
}
def get_dff_type(dff_idx, in_attrs):
def get_attrval_name(val):
for nam, vl in attrids.cls_attrvals.items():
if vl == val:
return nam
return None
attrs = {}
if 'LSRONMUX' in in_attrs.keys():
attrs['LSRONMUX'] = get_attrval_name(in_attrs['LSRONMUX'])
else:
attrs['LSRONMUX'] = ''
if 'CLKMUX_CLK' in in_attrs.keys():
attrs['CLKMUX_CLK'] = get_attrval_name(in_attrs['CLKMUX_CLK'])
else:
attrs['CLKMUX_CLK'] = 'SIG'
if 'SRMODE' in in_attrs.keys():
attrs['SRMODE'] = get_attrval_name(in_attrs['SRMODE'])
else:
attrs['SRMODE'] = ''
if f'REG{dff_idx % 2}_REGSET' in in_attrs.keys():
attrs['REGSET'] = get_attrval_name(in_attrs[f'REG{dff_idx % 2}_REGSET'])
else:
attrs['REGSET'] = 'SET'
return _dff_types.get((attrs['REGSET'], attrs['LSRONMUX'], attrs['CLKMUX_CLK'], attrs['SRMODE']))
# parse attributes and values use 'logicinfo' table
# returns {attr: value}
# attribute names are decoded with the attribute table, but the values are returned in raw form
def parse_attrvals(tile, logicinfo_table, fuse_table, attrname_table):
    """Recover the {attr: raw value} set encoded in 'tile' fuse bits.

    Keys of 'fuse_table' are tuples of logicinfo indices; a negative index
    means "feature unset by default".  A record matches when its fuse bits
    are a subset of the bits actually set in the tile and no other matching
    record's bits strictly contain them (the most specific record wins).
    """
    def is_neg_key(key):
        for k in key:
            if k < 0:
                return True
        return False

    def is_pos_key(key):
        return not is_neg_key(key)

    def get_positive(av):
        return {a for a in av if a > 0}

    def get_negative(av):
        return {abs(a) for a in av if a < 0}

    res = {}
    set_mask = set()
    zero_mask = set()
    # collect masks
    for av, bits in fuse_table.items():
        if is_neg_key(av):
            zero_mask.update(bits)
        else:
            set_mask.update(bits)
    # bits of the tile that are set and belong to each mask
    set_bits = {(row, col) for row, col in set_mask if tile[row][col] == 1}
    neg_bits = {(row, col) for row, col in zero_mask if tile[row][col] == 1}
    # find candidates from fuse table
    # the set bits are more unique
    attrvals = set()
    cnd = {av: bits for av, bits in fuse_table.items() if is_pos_key(av) and bits.issubset(set_bits)}
    for av, bits in cnd.items():
        keep = True
        for bt in cnd.values():
            # drop records strictly contained in another candidate
            if bits != bt and bits.issubset(bt):
                keep = False
                break
        if keep:
            clean_av = get_positive(av)
            attrvals.update(clean_av) # set attributes
            for idx in clean_av:
                attr, val = logicinfo_table[idx]
                res[get_attr_name(attrname_table, attr)] = val
    # records with a negative keys and used fuses
    neg_attrvals = set()
    ignore_attrs = set()
    cnd = {av: bits for av, bits in fuse_table.items() if is_neg_key(av) and bits.issubset(neg_bits)}
    for av, bits in cnd.items():
        keep = True
        for bt in cnd.values():
            if bits != bt and bits.issubset(bt):
                keep = False
                break
        for idx in av:
            attr, _ = logicinfo_table[idx]
            # an attribute already decoded from the positive records wins
            if attr in res.keys():
                keep = False
                break
        if keep:
            neg_attrvals.update(get_positive(av))
            ignore_attrs.update(get_negative(av))
    for idx in neg_attrvals:
        attr, val = logicinfo_table[idx]
        res[get_attr_name(attrname_table, attr)] = val
    # records with a negative keys and unused fuses
    cnd = {av for av, bits in fuse_table.items() if is_neg_key(av) and not bits.issubset(neg_bits)}
    for av in cnd:
        keep = True
        for idx in get_negative(av):
            if idx in ignore_attrs or not get_positive(av).issubset(attrvals):
                keep = False
                break
        if keep:
            # defaults that were never explicitly unset
            for idx in get_negative(av):
                attr, val = logicinfo_table[idx]
                res[get_attr_name(attrname_table, attr)] = val
    return res
# { (row, col, type) : idx}
# type 'A'| 'B'
# cache of PLL cell indices, filled lazily by parse_tile_()
_pll_cells = {}

# returns the A cell of the PLL
def get_pll_A(db, row, col, typ):
    # A 'B' cell is mapped onto its partner 'A' cell: on GW1N-9/9C the two
    # mirrored PLLs sit at the chip edges (column 0 or db.cols - 1,
    # depending on which half 'col' is in); elsewhere 'A' is the cell
    # immediately to the left.
    if typ == 'B':
        if _device in {"GW1N-9C", "GW1N-9"}:
            if col > 28:
                col = db.cols - 1
            else:
                col = 0
        else:
            col -= 1
    return row, col, 'A'
# { OUTMODE/INMODE attribute value name : IOLOGIC primitive name }
_iologic_mode = {
        'MODDRX2': 'OSER4',  'ODDRX2': 'OSER4',
        'MODDRX4': 'OSER8',  'ODDRX4': 'OSER8',
        'MODDRX5': 'OSER10', 'ODDRX5': 'OSER10',
        'VIDEORX': 'OVIDEO', 'ODDRX8': 'OSER16',
        'MIDDRX2': 'IDES4',  'IDDRX2': 'IDES4',
        'MIDDRX4': 'IDES8',  'IDDRX4': 'IDES8',
        'MIDDRX5': 'IDES10', 'IDDRX5': 'IDES10',
                             'IDDRX8': 'IDES16',
        }
# noiostd --- this is the case when the function is called
# with iostd by default, e.g. from the clock fuzzer
# With normal gowin_unpack io standard is determined first and it is known.
# (bels, pips, clock_pips)
def parse_tile_(db, row, col, tile, default=True, noalias=False, noiostd = True):
    """Decode one tile's fuse bitmap into (bels, pips, clock_pips).

    db       -- chip database
    row, col -- 0-based tile location in db.grid
    tile     -- 2D bit array with this tile's fuses
    default  -- also report pips/modes whose fuse set is empty (default state)
    noalias  -- report all clock pips, not only spine-aliased ones
    """
    if not _bank_fuse_tables:
        # create bank fuse table (lazy, shared across calls via module global)
        for ttyp in db.longval.keys():
            if 'BANK' in db.longval[ttyp].keys():
                for key, val in db.longval[ttyp]['BANK'].items():
                    _bank_fuse_tables.setdefault(ttyp, {}).setdefault(f'BANK{key[0]}', {})[key[1:]] = val
    # TLVDS takes two BUF bels, so skip the B bels.
    skip_bels = set()
    #print((row, col))
    tiledata = db.grid[row][col]
    clock_pips = {}
    bels = {}
    for name, bel in tiledata.bels.items():
        if name.startswith("RPLL"):
            # one logical PLL spans several cells; number it by its 'A' cell
            idx = _pll_cells.setdefault(get_pll_A(db, row, col, name[4]), len(_pll_cells))
            modes = { f'DEVICE="{_device}"' }
            if 'PLL' in db.shortval[tiledata.ttyp].keys():
                attrvals = pll_attrs_refine(parse_attrvals(tile, db.logicinfo['PLL'], db.shortval[tiledata.ttyp]['PLL'], attrids.pll_attrids))
                for attrval in attrvals:
                    modes.add(attrval)
            if modes:
                bels[f'{name}{idx}'] = modes
            continue
        if name == "PLLVR":
            idx = _pll_cells.setdefault(get_pll_A(db, row, col, 'A'), len(_pll_cells))
            attrvals = pll_attrs_refine(parse_attrvals(tile, db.logicinfo['PLL'], db.shortval[tiledata.ttyp]['PLL'], attrids.pll_attrids))
            modes = { f'DEVICE="{_device}"' }
            for attrval in attrvals:
                modes.add(attrval)
            if modes:
                bels[f'{name}{idx}'] = modes
            continue
        if name.startswith("OSC"):
            attrvals = osc_attrs_refine(parse_attrvals(tile, db.logicinfo['OSC'], db.shortval[tiledata.ttyp]['OSC'], attrids.osc_attrids))
            modes = set()
            for attrval in attrvals:
                modes.add(attrval)
            if modes:
                bels[name] = modes
            continue
        if name.startswith("IOLOGIC"):
            idx = name[-1]
            attrvals = parse_attrvals(tile, db.logicinfo['IOLOGIC'], db.shortval[tiledata.ttyp][f'IOLOGIC{idx}'], attrids.iologic_attrids)
            #print(row, col, attrvals)
            if not attrvals:
                continue
            if 'OUTMODE' in attrvals.keys():
                # XXX skip oddr
                if attrvals['OUTMODE'] in {attrids.iologic_attrvals['MODDRX1'], attrids.iologic_attrvals['ODDRX1']}:
                    # x1 output DDR: plain ODDR, with CLEAR when LSR is routed
                    if 'LSROMUX_0' in attrvals.keys():
                        bels.setdefault(name, set()).add(f"MODE=ODDRC")
                    else:
                        bels.setdefault(name, set()).add(f"MODE=ODDR")
                    continue
                # skip aux cells
                if attrvals['OUTMODE'] == attrids.iologic_attrvals['DDRENABLE']:
                    continue
                if attrids.iologic_num2val[attrvals['OUTMODE']] in _iologic_mode.keys():
                    bels.setdefault(name, set()).add(f"MODE={_iologic_mode[attrids.iologic_num2val[attrvals['OUTMODE']]]}")
            elif 'INMODE' in attrvals.keys():
                if attrvals['INMODE'] in {attrids.iologic_attrvals['MIDDRX1'], attrids.iologic_attrvals['IDDRX1']}:
                    if 'LSRIMUX_0' in attrvals.keys():
                        bels.setdefault(name, set()).add(f"MODE=IDDRC")
                    else:
                        bels.setdefault(name, set()).add(f"MODE=IDDR")
                    continue
                # skip aux cells
                if attrvals['INMODE'] == attrids.iologic_attrvals['DDRENABLE']:
                    continue
                if attrids.iologic_num2val[attrvals['INMODE']] in _iologic_mode.keys():
                    in_mode = _iologic_mode[attrids.iologic_num2val[attrvals['INMODE']]]
                    if in_mode == 'OVIDEO':
                        # input direction uses the deserializer twin
                        in_mode = 'IVIDEO'
                    bels.setdefault(name, set()).add(f"MODE={in_mode}")
            else:
                continue
            if 'CLKODDRMUX_ECLK' in attrvals.keys():
                bels.setdefault(name, set()).add(f"CLKODDRMUX_ECLK={attrids.iologic_num2val[attrvals['CLKODDRMUX_ECLK']]}")
        if name.startswith("DFF"):
            idx = int(name[3])
            attrvals = parse_attrvals(tile, db.logicinfo['SLICE'], db.shortval[tiledata.ttyp][f'CLS{idx // 2}'], attrids.cls_attrids)
            #print(row, col, attrvals)
            # skip ALU and unsupported modes
            if attrvals.get('MODE') == attrids.cls_attrvals['SSRAM']:
                continue
            dff_type = get_dff_type(idx, attrvals)
            if dff_type:
                bels[f'{name}'] = {dff_type}
            continue
        if name.startswith("IOB"):
            idx = name[-1]
            attrvals = parse_attrvals(tile, db.logicinfo['IOB'], db.longval[tiledata.ttyp][f'IOB{idx}'], attrids.iob_attrids)
            #print(row, col, attrvals)
            try: # we can ask for invalid pin here because the IOBs share some stuff
                bank = chipdb.loc2bank(db, row, col)
            except KeyError:
                bank = None
            # NOTE(review): 'bank' is currently unused (iostd handling removed)
            if attrvals:
                mode = 'IBUF'
                if attrvals.get('PERSISTENT', None) == attrids.iob_attrvals['OFF']:
                    mode = 'IOBUF'
                elif 'ODMUX' in attrvals.keys() or 'ODMUX_1' in attrvals.keys():
                    mode = 'OBUF'
                    # Z-1 row 6
                    if _device in {'GW1NZ-1', 'GW1N-1'} and row == 5:
                        mode = 'IOBUF'
                if 'LVDS_OUT' in attrvals.keys():
                    if mode == 'IOBUF':
                        mode = 'TBUF'
                    mode = f'TLVDS_{mode}'
                    # skip B bel
                    skip_bels.update({name[:-1] + 'B'})
                elif idx == 'B' and 'DRIVE' not in attrvals.keys() and 'IO_TYPE' in attrvals.keys():
                    mode = f'ELVDS_{mode}'
                    # skip B bel
                    skip_bels.update({name})
                elif 'IOBUF_MIPI_LP' in attrvals.keys():
                    mode = f'ELVDS_{mode}'
                    # skip B bel
                    skip_bels.update({name[:-1] + 'B'})
                bels.setdefault(name, set()).add(mode)
        if name.startswith("BANK"):
            attrvals = parse_attrvals(tile, db.logicinfo['IOB'], _bank_fuse_tables[tiledata.ttyp][name], attrids.iob_attrids)
            for a, v in attrvals.items():
                bels.setdefault(name, set()).add(f'{a}={attrids.iob_num2val[v]}')
        if name.startswith("ALU"):
            idx = int(name[3])
            attrvals = parse_attrvals(tile, db.logicinfo['SLICE'], db.shortval[tiledata.ttyp][f'CLS{idx // 2}'], attrids.cls_attrids)
            # skip ALU and unsupported modes
            if attrvals.get('MODE') != attrids.cls_attrvals['ALU']:
                continue
            bels[name] = {"C2L"}
            mode_bits = {(row, col)
                         for row, col in bel.mode_bits
                         if tile[row][col] == 1}
            for mode, bits in bel.modes.items():
                if bits == mode_bits and (default or bits):
                    bels[name] = {mode}
        else:
            # generic bel: match the set fuse bits against known modes
            mode_bits = {(row, col)
                         for row, col in bel.mode_bits
                         if tile[row][col] == 1}
            #print(name, sorted(bel.mode_bits))
            #print("read mode:", sorted(mode_bits))
            for mode, bits in bel.modes.items():
                #print(mode, sorted(bits))
                if bits == mode_bits and (default or bits):
                    bels.setdefault(name, set()).add(mode)
        # simple flags
        for flag, bits in bel.flags.items():
            used_bits = {tile[row][col] for row, col in bits}
            if all(used_bits):
                if name == "RAM16" and not name in bels:
                    continue
                bels.setdefault(name, set()).add(flag)
        # revert BUFS flags
        if name.startswith('BUFS'):
            flags = bels.get(name, set()) ^ {'R', 'L'}
            if flags:
                num = name[4:]
                half = 'T'
                if row != 0:
                    half = 'B'
                for qd in flags:
                    clock_pips[f'LWSPINE{half}{qd}{num}'] = f'LW{half}{num}'
        #print("flags:", sorted(bels.get(name, set())))
    pips = {}
    for dest, srcs in tiledata.pips.items():
        pip_bits = set().union(*srcs.values())
        used_bits = {(row, col)
                     for row, col in pip_bits
                     if tile[row][col] == 1}
        for src, bits in srcs.items():
            # optionally ignore the default set() state
            if bits == used_bits and (default or bits):
                pips[dest] = src
    for dest, srcs in tiledata.clock_pips.items():
        pip_bits = set().union(*srcs.values())
        used_bits = {(row, col)
                     for row, col in pip_bits
                     if tile[row][col] == 1}
        for src, bits in srcs.items():
            # only report connection aliased to by a spine
            # HCLKs are also switched here, so for now we are also considering SPINExx type wires
            if bits == used_bits and (noalias or (row, col, src) in db.aliases or (src.startswith('SPINE') and dest.startswith('SPINE'))):
                clock_pips[dest] = src
    # elvds IO uses the B bel bits
    for name in skip_bels:
        bel_a = bels[f'{name[:-1]}A']
        if not bel_a.intersection({'ELVDS_IBUF', 'ELVDS_OBUF', 'ELVDS_IOBUF', 'ELVDS_TBUF',
                                   'TLVDS_IBUF', 'TLVDS_OBUF', 'TLVDS_IOBUF', 'TLVDS_TBUF'}):
            mode = bels[name].intersection({'ELVDS_IBUF', 'ELVDS_OBUF', 'ELVDS_IOBUF', 'ELVDS_TBUF'})
            if mode:
                old_mode = bel_a.intersection({'IBUF', 'OBUF', 'IOBUF', 'TBUF'})
                bel_a -= old_mode
                bel_a.update(mode)
    return {name: bel for name, bel in bels.items() if name not in skip_bels}, pips, clock_pips
# DFF flavor -> name of the extra set/reset port on the generated <kind>E
# primitive (None: no such port).
dffmap = {
    "DFF": None,
    "DFFN": None,
    "DFFS": "SET",
    "DFFR": "RESET",
    "DFFP": "PRESET",
    "DFFC": "CLEAR",
    "DFFNS": "SET",
    "DFFNR": "RESET",
    "DFFNP": "PRESET",
    "DFFNC": "CLEAR",
}
# IO buffer kind -> its internal fabric-side 'wires' and its module-level
# ports grouped by direction (inputs/outputs/inouts).
iobmap = {
    "IBUF": {"wires": ["O"], "inputs": ["I"]},
    "OBUF": {"wires": ["I"], "outputs": ["O"]},
    "TBUF": {"wires": ["I", "OE"], "outputs": ["O"]},
    "IOBUF": {"wires": ["I", "O", "OE"], "inouts": ["IO"]},
    "TLVDS_OBUF": {"wires": ["I"], "outputs": ["O", "OB"]},
    "TLVDS_TBUF": {"wires": ["I", "OE"], "outputs": ["O", "OB"]},
    "TLVDS_IBUF": {"wires": ["O"], "inputs": ["I", "IB"]},
    "ELVDS_OBUF": {"wires": ["I"], "outputs": ["O", "OB"]},
    "ELVDS_TBUF": {"wires": ["I", "OE"], "outputs": ["O", "OB"]},
    "ELVDS_IBUF": {"wires": ["O"], "inputs": ["I", "IB"]},
    "ELVDS_IOBUF": {"wires": ["I", "O", "OE"], "inouts": ["IO", "IOB"]},
}
# OE -> OEN
def portname(n):
    """Translate an internal port name to the Verilog primitive's name."""
    return "OEN" if n == "OE" else n
def make_muxes(row, col, idx, db, mod):
    """Instantiate the wide-function MUX2 tree (MUX8/7/6/5) for a slice.

    Idempotent: returns early if the tree for this cell already exists.
    """
    name = f"R{row}C{col}_MUX2_LUT50"
    if name in mod.primitives.keys():
        return

    # one MUX8 (spans two tiles, so only when a right neighbour exists)
    if col < db.cols :
        name = f"R{row}C{col}_MUX2_LUT80"
        mux2 = codegen.Primitive("MUX2", name)
        mux2.portmap['I0'] = f"R{row}C{col + 1}_OF3"
        mux2.portmap['I1'] = f"R{row}C{col}_OF3"
        mux2.portmap['O'] = f"R{row}C{col}_OF7"
        mux2.portmap['S0'] = f"R{row}C{col}_SEL7"
        mod.wires.update(mux2.portmap.values())
        mod.primitives[name] = mux2

    # one MUX7
    name = f"R{row}C{col}_MUX2_LUT70"
    mux2 = codegen.Primitive("MUX2", name)
    mux2.portmap['I0'] = f"R{row}C{col}_OF5"
    mux2.portmap['I1'] = f"R{row}C{col}_OF1"
    mux2.portmap['O'] = f"R{row}C{col}_OF3"
    mux2.portmap['S0'] = f"R{row}C{col}_SEL3"
    mod.wires.update(mux2.portmap.values())
    mod.primitives[name] = mux2

    # two MUX6
    for i in range(2):
        name = f"R{row}C{col}_MUX2_LUT6{i}"
        mux2 = codegen.Primitive("MUX2", name)
        mux2.portmap['I0'] = f"R{row}C{col}_OF{i * 4 + 2}"
        mux2.portmap['I1'] = f"R{row}C{col}_OF{i * 4}"
        mux2.portmap['O'] = f"R{row}C{col}_OF{i * 4 + 1}"
        mux2.portmap['S0'] = f"R{row}C{col}_SEL{i * 4 + 1}"
        mod.wires.update(mux2.portmap.values())
        mod.primitives[name] = mux2

    # four MUX5 (fed directly by the LUT outputs F0..F7)
    for i in range(4):
        name = f"R{row}C{col}_MUX2_LUT5{i}"
        mux2 = codegen.Primitive("MUX2", name)
        mux2.portmap['I0'] = f"R{row}C{col}_F{i * 2}"
        mux2.portmap['I1'] = f"R{row}C{col}_F{i * 2 + 1}"
        mux2.portmap['O'] = f"R{row}C{col}_OF{i * 2}"
        mux2.portmap['S0'] = f"R{row}C{col}_SEL{i * 2}"
        mod.wires.update(mux2.portmap.values())
        mod.primitives[name] = mux2
# Matches ALU bel names, capturing the index ("ALU3" -> "3").
_alu_re = re.compile(r"ALU(\w*)")
def removeLUTs(bels):
    """Drop every LUT bel whose slot is occupied by an ALU (in place)."""
    doomed = [f"LUT{m.group(1)}"
              for m in (re.match(r"ALU(\w*)", b) for b in bels) if m]
    for lut in doomed:
        bels.pop(lut, None)
def removeALUs(bels):
    """Drop every ALU bel (used with --noalu, keeping the LUT view)."""
    doomed = [m.group(0)
              for m in (re.match(r"ALU(\w*)", b) for b in bels) if m]
    for alu in doomed:
        bels.pop(alu, None)
def ram16_remove_bels(bels):
    """When the slice works as RAM16, drop the LUT/DFF bels it subsumes."""
    if "RAM16" not in bels:
        return
    victims = [f"LUT{i}" for i in range(6)] + [f"DFF{i}" for i in range(4, 6)]
    for victim in victims:
        bels.pop(victim, None)
# Bels that must be emitted before the IOBs they pair with (see move_iologic).
_iologic_bels = ['IOLOGICA', 'IOLOGICB', 'ODDRA', 'ODDRB']
def move_iologic(bels):
    """Return bels as a list with IOLOGIC/ODDR entries moved to the front.

    tile2verilog must see the IOLOGIC bels before the paired IOBs.
    """
    front = ('IOLOGICA', 'IOLOGICB', 'ODDRA', 'ODDRB')
    ordered = [(b, bels[b]) for b in front if b in bels]
    ordered.extend((b, f) for b, f in bels.items() if b not in front)
    return ordered
def disable_unused_pll_ports(pll):
    """Strip PLL ports/params that the fuse image marks as unused (in place).

    Dynamic-control input buses are dropped when their enabling parameter is
    absent; the one-shot fuse params are always consumed, and their port is
    dropped when the param says DISABLE.
    """
    dyn_inputs = (
        ('DYN_DA_EN', ('PSDA', 'DUTYDA', 'FDLY'), 4),
        ('DYN_IDIV_SEL', ('IDSEL',), 6),
        ('DYN_FBDIV_SEL', ('FBDSEL',), 6),
        ('DYN_ODIV_SEL', ('ODSEL',), 6),
    )
    for enable, prefixes, width in dyn_inputs:
        if enable in pll.params:
            continue
        for n in range(width):
            if f'{prefixes[0]}{n}' in pll.portmap:
                for prefix in prefixes:
                    del pll.portmap[f'{prefix}{n}']
    one_shot = (
        ('PWDEN', 'RESET_P'),
        ('RSTEN', 'RESET'),
        ('CLKOUTDIV3', 'CLKOUTD3'),
        ('CLKOUTDIV', 'CLKOUTD'),
        ('CLKOUTPS', 'CLKOUTP'),
    )
    for param, port in one_shot:
        if param in pll.params:
            if pll.params[param] == 'DISABLE' and port in pll.portmap:
                del pll.portmap[port]
            del pll.params[param]
# Edge-relative IO location: side (T/B/R/L), 1-based number, bel letter.
_tbrlre = re.compile(r"IO([TBRL])(\d+)(\w)")
def tbrl2rc(db, loc):
    """Convert an edge-relative IO location like 'IOT5A' to (row, col, bel)."""
    side, num, bel_idx = re.match(r"IO([TBRL])(\d+)(\w)", loc).groups()
    n = int(num) - 1
    if side == 'T':
        row, col = 0, n
    elif side == 'B':
        row, col = db.rows - 1, n
    elif side == 'L':
        row, col = n, 0
    elif side == 'R':
        row, col = n, db.cols - 1
    return (row, col, bel_idx)
def find_pll_in_pin(db, pll):
    """Route the PLL's CLKIN to the package's dedicated [RL]PLL_T_IN pin."""
    # _pinout is the module-global pin table for the selected package
    locs = [loc for (loc, cfgs) in _pinout.values() if 'RPLL_T_IN' in cfgs or 'LRPLL_T_IN' in cfgs]
    if not locs:
        raise Exception(f"No [RL]PLL_T_IN pin in the current package")
    row, col, bel_idx = tbrl2rc(db, locs[0])
    wire = db.grid[row][col].bels[f'IOB{bel_idx}'].portmap['O']
    pll.portmap['CLKIN'] = f'R{row + 1}C{col + 1}_{wire}'
def modify_pll_inputs(db, pll):
    """Resolve raw INSEL/FBSEL fuse values into real PLL ports/params (in place)."""
    if 'INSEL' in pll.params:
        insel = pll.params.pop('INSEL')
        if insel != 'CLKIN1':
            if insel == 'CLKIN0':
                # dedicated clock pin
                find_pll_in_pin(db, pll)
            elif 'CLKIN' in pll.portmap:
                # no usable clock input routed
                del pll.portmap['CLKIN']
    if 'FBSEL' in pll.params:
        fbsel = pll.params.pop('FBSEL')
        if fbsel == 'CLKFB3':
            # internal feedback
            pll.params['CLKFB_SEL'] = '"internal"'
            pll.portmap.pop('CLKFB', None)
        elif fbsel == 'CLKFB0':
            # external CLK2
            pll.params['CLKFB_SEL'] = '"external"'
        elif fbsel == 'CLKFB2':
            # external pin
            pll.params['CLKFB_SEL'] = '"external"'
            # XXX find pin
# Primitive port name -> bel wire name for every supported IOLOGIC mode.
# For IDES16/OSER16 the data/Q wires partly live in the neighbouring aux
# cell (handled in tile2verilog).
_iologic_ports = {
    'ODDR' : {'D0': 'D0', 'D1': 'D1', 'Q0': 'Q0', 'Q1': 'Q1', 'CLK': 'CLK'},
    'ODDRC' : {'D0': 'D0', 'D1': 'D1', 'Q0': 'Q0', 'Q1': 'Q1', 'CLK': 'CLK', 'CLEAR': 'CLEAR'},
    'OSER4': {'D0': 'D0', 'D1': 'D1', 'D2': 'D2', 'D3': 'D3',
              'Q0': 'Q0', 'Q1': 'Q1', 'RESET': 'RESET', 'TX0': 'TX0',
              'TX1': 'TX1', 'PCLK': 'PCLK', 'FCLK': 'FCLK'},
    'OSER8': {'D0': 'D0', 'D1': 'D1', 'D2': 'D2', 'D3': 'D3',
              'D4': 'D4', 'D5': 'D5', 'D6': 'D6', 'D7': 'D7',
              'Q0': 'Q0', 'Q1': 'Q1', 'RESET': 'RESET', 'TX0': 'TX90',  # NOTE(review): 'TX90' looks like a typo for 'TX0' (cf. OSER4) -- verify against the bel wire names
              'TX1': 'TX1', 'TX2': 'TX2', 'TX3': 'TX3',
              'PCLK': 'PCLK', 'FCLK': 'FCLK'},
    'OVIDEO':{'D0': 'D0', 'D1': 'D1', 'D2': 'D2', 'D3': 'D3',
              'D4': 'D4', 'D5': 'D5', 'D6': 'D6', 'Q': 'Q',
              'RESET': 'RESET', 'PCLK': 'PCLK', 'FCLK': 'FCLK'},
    'OSER10': {'D0': 'D0', 'D1': 'D1', 'D2': 'D2', 'D3': 'D3',
               'D4': 'D4', 'D5': 'D5', 'D6': 'D6', 'D7': 'D7', 'D8': 'D8', 'D9': 'D9',
               'Q': 'Q', 'RESET': 'RESET', 'PCLK': 'PCLK', 'FCLK': 'FCLK'},
    'OSER16': {'D0': 'A0', 'D1': 'A1', 'D2': 'A2', 'D3': 'A3',
               'D4': 'C1', 'D5': 'C0', 'D6': 'D1', 'D7': 'D0', 'D8': 'C3', 'D9': 'C2',
               'D10': 'B4', 'D11': 'B5', 'D12': 'A0', 'D13': 'A1', 'D14': 'A2',
               'D15': 'A3',},
    'IDDR' : {'D': 'D', 'Q8': 'Q0', 'Q9': 'Q1', 'CLK': 'CLK'},
    'IDDRC' : {'D': 'D', 'Q8': 'Q0', 'Q9': 'Q1', 'CLK': 'CLK', 'CLEAR': 'CLEAR'},
    'IDES4': {'D': 'D', 'Q6': 'Q0', 'Q7': 'Q1', 'Q8': 'Q2', 'Q9': 'Q3',
              'RESET': 'RESET', 'CALIB': 'CALIB', 'PCLK': 'PCLK', 'FCLK': 'FCLK'},
    'IDES8': {'D': 'D', 'Q2': 'Q0', 'Q3': 'Q1', 'Q4': 'Q2', 'Q5': 'Q3', 'Q6': 'Q4',
              'Q7': 'Q5', 'Q8': 'Q6', 'Q9': 'Q7',
              'RESET': 'RESET', 'CALIB': 'CALIB', 'PCLK': 'PCLK', 'FCLK': 'FCLK'},
    'IVIDEO': {'D': 'D', 'Q3': 'Q0', 'Q4': 'Q1', 'Q5': 'Q2', 'Q6': 'Q3', 'Q7': 'Q4',
               'Q8': 'Q5', 'Q9': 'Q6',
               'RESET': 'RESET', 'CALIB': 'CALIB', 'PCLK': 'PCLK', 'FCLK': 'FCLK'},
    'IDES10': {'D': 'D', 'Q0': 'Q0', 'Q1': 'Q1', 'Q2': 'Q2', 'Q3': 'Q3', 'Q4': 'Q4',
               'Q5': 'Q5', 'Q6': 'Q6', 'Q7': 'Q7', 'Q8': 'Q8', 'Q9': 'Q9',
               'RESET': 'RESET', 'CALIB': 'CALIB', 'PCLK': 'PCLK', 'FCLK': 'FCLK'},
    'IDES16': {'Q0': 'F2', 'Q1': 'F3', 'Q2': 'F4', 'Q3': 'F5', 'Q4': 'Q0', 'Q5': 'Q1',
               'Q6': 'Q2', 'Q7': 'Q3', 'Q8': 'Q4', 'Q9': 'Q5', 'Q10': 'F0',
               'Q11': 'F1', 'Q12': 'F2', 'Q13': 'F3', 'Q14': 'F4', 'Q15': 'F5' },
}
def iologic_ports_by_type(typ, portmap):
    """Return a set of (primitive_port, wire) pairs for an IOLOGIC of type typ."""
    table = _iologic_ports[typ]
    if typ not in {'IDES16', 'OSER16'}:
        return {(table[p], w) for p, w in portmap.items() if p in table}
    # the 16:1 modes use the fixed table plus a few per-site wires
    ports = set(table.items())
    for p in ('RESET', 'PCLK', 'FCLK'):
        ports.add((p, portmap[p]))
    if typ == 'IDES16':
        ports.add(('CALIB', portmap['CALIB']))
        ports.add(('D', portmap['D']))
    else:
        ports.add(('Q', portmap['Q']))
    return ports
# Bel side letter for constraints: even bel index -> 'A', odd -> 'B'.
_sides = "AB"
def tile2verilog(dbrow, dbcol, bels, pips, clock_pips, mod, cst, db):
    """Emit Verilog primitives/assigns (into mod) and constraints (into cst)
    for one decoded tile.

    dbrow, dbcol -- 0-based grid location; generated names use the 1-based
                    floorplanner convention R{row}C{col}_*.
    bels, pips, clock_pips -- output of parse_tile_ for this tile.
    """
    # db is 0-based, floorplanner is 1-based
    row = dbrow + 1
    col = dbcol + 1

    # routing: every active pip/alias becomes a continuous assignment
    aliases = db.grid[dbrow][dbcol].aliases
    for dest, src in chain(pips.items(), aliases.items(), clock_pips.items()):
        srcg = chipdb.wire2global(row, col, db, src)
        destg = chipdb.wire2global(row, col, db, dest)
        mod.wires.update({srcg, destg})
        mod.assigns.append((destg, srcg))

    belre = re.compile(r"(IOB|LUT|DFF|BANK|CFG|ALU|RAM16|ODDR|OSC[ZFHWO]?|BUFS|RPLL[AB]|PLLVR|IOLOGIC)(\w*)")
    # process IOLOGIC bels first so iologic_detected is complete before the IOBs
    bels_items = move_iologic(bels)

    iologic_detected = set()
    # NOTE(review): disable_oddr is written below but never read -- vestigial?
    disable_oddr = False
    for bel, flags in bels_items:
        typ, idx = belre.match(bel).groups()
        if typ == "LUT":
            # flags hold the bit positions that are zero in INIT
            val = 0xffff - sum(1 << f for f in flags)
            if val == 0:
                # constant-0 LUT: a plain assign instead of a primitive
                mod.assigns.append((f"R{row}C{col}_F{idx}", "VSS"))
            else:
                name = f"R{row}C{col}_LUT4_{idx}"
                lut = codegen.Primitive("LUT4", name)
                lut.params["INIT"] = f"16'h{val:04x}"
                lut.portmap['F'] = f"R{row}C{col}_F{idx}"
                lut.portmap['I0'] = f"R{row}C{col}_A{idx}"
                lut.portmap['I1'] = f"R{row}C{col}_B{idx}"
                lut.portmap['I2'] = f"R{row}C{col}_C{idx}"
                lut.portmap['I3'] = f"R{row}C{col}_D{idx}"
                mod.wires.update(lut.portmap.values())
                mod.primitives[name] = lut
                cst.cells[name] = (row, col, int(idx) // 2, _sides[int(idx) % 2])
            make_muxes(row, col, idx, db, mod)
        elif typ.startswith("IOLOGIC"):
            iologic_detected.add(idx)
            iol_mode = 'IVIDEO' #XXX
            disable_oddr = True
            eclk = 'HCLK0'
            iol_params = {}
            for paramval in flags:
                param, _, val = paramval.partition('=')
                if param == 'MODE':
                    iol_mode = val
                    if val == 'OSER4':
                        disable_oddr = False
                    continue
                if param == 'CLKODDRMUX_ECLK':
                    # BUG FIX: was `eclk == val` -- a no-op comparison; the
                    # selected ECLK never reached the FCLK wiring below.
                    eclk = val
                    continue
                if param == 'CLKIDDRMUX_ECLK':
                    # BUG FIX: was `eclk == val` (same no-op comparison)
                    eclk = val
                    continue
                iol_params[param] = val
            name = f"R{row}C{col}_{iol_mode}_{idx}"
            iol = mod.primitives.setdefault(name, codegen.Primitive(iol_mode, name))
            iol.params.update(iol_params)
            iol_oser = iol_mode in {'ODDR', 'ODDRC', 'OSER4', 'OVIDEO', 'OSER8', 'OSER10', 'OSER16'}
            portmap = db.grid[dbrow][dbcol].bels[bel].portmap
            for port, wname in iologic_ports_by_type(iol_mode, portmap):
                if iol_oser:
                    if port in {'Q', 'Q0', 'Q1'}:
                        # serializer outputs reach the IOB through *_IOL wires
                        if port == 'Q1':
                            iol.portmap[port] = f"R{row}C{col}_{portmap['TX0']}_IOL"
                        else:
                            iol.portmap[port] = f"R{row}C{col}_{portmap['D0']}_IOL"
                    elif port == 'FCLK':
                        wname = eclk
                        if eclk == 'HCLK0' and _device in {'GW1N-1'}:
                            wname = 'CLK2'
                        iol.portmap[port] = f"R{row}C{col}_{wname}"
                    else:
                        if iol_mode != 'OSER16' or port not in {'D12', 'D13', 'D14', 'D15'}:
                            iol.portmap[port] = f"R{row}C{col}_{wname}"
                        else:
                            # OSER16 high data bits live in the neighbouring aux cell
                            if row == 1 or row == db.rows:
                                iol.portmap[port] = f"R{row}C{col + 1}_{wname}"
                            else:
                                iol.portmap[port] = f"R{row + 1}C{col}_{wname}"
                else: # IDES
                    if port in {'D'}:
                        iol.portmap[port] = f"R{row}C{col}_{portmap['D']}_IOL"
                    else:
                        if iol_mode != 'IDES16':
                            iol.portmap[port] = f"R{row}C{col}_{wname}"
                        else:
                            if port not in {'Q0', 'Q1', 'Q2', 'Q3'}:
                                iol.portmap[port] = f"R{row}C{col}_{wname}"
                            else:
                                # IDES16 low outputs live in the neighbouring aux cell
                                if row == 1 or row == db.rows:
                                    iol.portmap[port] = f"R{row}C{col + 1}_{wname}"
                                else:
                                    iol.portmap[port] = f"R{row + 1}C{col}_{wname}"
                        if port == 'FCLK':
                            wname = eclk
                            if eclk == 'HCLK0' and _device in {'GW1N-1'}:
                                wname = 'CLK2'
                            iol.portmap[port] = f"R{row}C{col}_{wname}"
        elif typ.startswith("RPLL"):
            name = f"PLL_{idx}"
            # both A and B cells contribute to the same logical PLL primitive
            pll = mod.primitives.setdefault(name, codegen.Primitive("rPLL", name))
            for paramval in flags:
                param, _, val = paramval.partition('=')
                pll.params[param] = val
            portmap = db.grid[dbrow][dbcol].bels[bel[:-1]].portmap
            for port, wname in portmap.items():
                pll.portmap[port] = f"R{row}C{col}_{wname}"
        elif typ.startswith("PLLVR"):
            name = f"PLL_{idx}"
            pll = mod.primitives.setdefault(name, codegen.Primitive("PLLVR", name))
            for paramval in flags:
                param, _, val = paramval.partition('=')
                pll.params[param] = val
            portmap = db.grid[dbrow][dbcol].bels[bel[:-1]].portmap
            for port, wname in portmap.items():
                pll.portmap[port] = f"R{row}C{col}_{wname}"
        elif typ == "ALU":
            kind, = flags # ALU only have one flag
            idx = int(idx)
            name = f"R{row}C{col}_ALU_{idx}"
            if kind == 'hadder':
                kind = '0'
            if kind in "012346789" or kind == "C2L": # main ALU
                alu = codegen.Primitive("ALU", name)
                alu.params["ALU_MODE"] = kind
                if kind != "C2L":
                    alu.portmap['SUM'] = f"R{row}C{col}_F{idx}"
                alu.portmap['CIN'] = f"R{row}C{col}_CIN{idx}"
                alu.portmap['I2'] = f"R{row}C{col}_C{idx}"
                if idx != 5:
                    alu.portmap['COUT'] = f"R{row}C{col}_CIN{idx + 1}"
                else:
                    # carry chain continues in the next column
                    alu.portmap['COUT'] = f"R{row}C{col + 1}_CIN{0}"
                if kind in "2346789":
                    alu.portmap['I0'] = f"R{row}C{col}_A{idx}"
                    alu.portmap['I1'] = f"R{row}C{col}_B{idx}"
                    if kind in "28":
                        alu.portmap['I3'] = f"R{row}C{col}_D{idx}"
                elif kind == "0":
                    alu.portmap['I0'] = f"R{row}C{col}_B{idx}"
                    alu.portmap['I1'] = f"R{row}C{col}_D{idx}"
                elif kind == "C2L":
                    alu.portmap['I0'] = f"R{row}C{col}_B{idx}"
                    alu.portmap['I1'] = f"R{row}C{col}_D{idx}"
                    alu.portmap['COUT'] = f"R{row}C{col}_F{idx}"
                    alu.params["ALU_MODE"] = "9" # XXX
                else:
                    alu.portmap['I0'] = f"R{row}C{col}_A{idx}"
                    alu.portmap['I1'] = f"R{row}C{col}_D{idx}"
                mod.wires.update(alu.portmap.values())
                mod.primitives[name] = alu
        elif typ == "RAM16":
            # flags hold the bit positions that are zero in the INIT fuses
            val0 = sum(1 << x for x in range(0, 16) if not x in flags)
            val1 = sum(1 << (x - 16) for x in range(16, 32) if not x in flags)
            val2 = sum(1 << (x - 32) for x in range(32, 48) if not x in flags)
            val3 = sum(1 << (x - 48) for x in range(48, 64) if not x in flags)
            name = f"R{row}C{col}_RAM16"
            ram16 = codegen.Primitive("RAM16SDP4", name)
            ram16.params["INIT_0"] = f"16'b{val0:016b}"
            ram16.params["INIT_1"] = f"16'b{val1:016b}"
            ram16.params["INIT_2"] = f"16'b{val2:016b}"
            ram16.params["INIT_3"] = f"16'b{val3:016b}"
            ram16.portmap['DI'] = [f"R{row}C{col}_{x}5" for x in "DCBA"]
            ram16.portmap['CLK'] = f"R{row}C{col}_CLK2"
            ram16.portmap['WRE'] = f"R{row}C{col}_LSR2"
            ram16.portmap['WAD'] = [f"R{row}C{col}_{x}4" for x in "DCBA"]
            ram16.portmap['RAD'] = [f"R{row}C{col}_{x}0" for x in "DCBA"]
            # NOTE(review): range(4, -1, -1) yields five F wires (F4..F0) for a
            # 4-bit DO bus -- verify the intended width
            ram16.portmap['DO'] = [f"R{row}C{col}_F{x}" for x in range(4, -1, -1)]
            mod.wires.update(chain.from_iterable([x if isinstance(x, list) else [x] for x in ram16.portmap.values()]))
            mod.primitives[name] = ram16
        elif typ in {"OSC", "OSCZ", "OSCF", "OSCH", "OSCW", "OSCO"}:
            name = f"R{row}C{col}_{typ}"
            osc = codegen.Primitive(typ, name)
            for paramval in flags:
                param, _, val = paramval.partition('=')
                osc.params[param] = val
            portmap = db.grid[dbrow][dbcol].bels[bel].portmap
            for port, wname in portmap.items():
                osc.portmap[port] = f"R{row}C{col}_{wname}"
            mod.wires.update(osc.portmap.values())
            mod.primitives[name] = osc
        elif typ == "DFF":
            kind, = flags # DFF only have one flag
            if kind == "RAM": continue
            idx = int(idx)
            port = dffmap[kind]
            name = f"R{row}C{col}_{typ}E_{idx}"
            dff = codegen.Primitive(kind + "E", name)
            dff.portmap['CLK'] = f"R{row}C{col}_CLK{idx // 2}"
            dff.portmap['D'] = f"R{row}C{col}_F{idx}"
            dff.portmap['Q'] = f"R{row}C{col}_Q{idx}"
            dff.portmap['CE'] = f"R{row}C{col}_CE{idx // 2}"
            if port:
                dff.portmap[port] = f"R{row}C{col}_LSR{idx // 2}"
            mod.wires.update(dff.portmap.values())
            mod.primitives[name] = dff
            cst.cells[name] = (row, col, int(idx) // 2, _sides[int(idx) % 2])
        elif typ == "IOB":
            try:
                kind, = flags.intersection(iobmap.keys())
            except ValueError:
                # no recognized buffer mode present on this IOB
                continue
            flags.remove(kind)
            portmap = db.grid[dbrow][dbcol].bels[bel].portmap
            name = f"R{row}C{col}_{kind}_{idx}"
            wires = set(iobmap[kind]['wires'])
            ports = set(chain.from_iterable(iobmap[kind].values())) - wires
            iob = codegen.Primitive(kind, name)
            # pair with IOLOGIC through the dedicated *_IOL wires when present
            if idx in iologic_detected:
                wires_suffix = '_IOL'
            else:
                wires_suffix = ''
            for port in wires:
                wname = portmap[port]
                iob.portmap[portname(port)] = f"R{row}C{col}_{wname}{wires_suffix}"
            for port in ports:
                iob.portmap[port] = f"R{row}C{col}_{port}{idx}"
            wnames = [f"R{row}C{col}_{portmap[w]}" for w in iobmap[kind]['wires']]
            mod.wires.update(wnames)
            for direction in ['inputs', 'outputs', 'inouts']:
                wnames = [f"R{row}C{col}_{w}{idx}" for w in iobmap[kind].get(direction, [])]
                getattr(mod, direction).update(wnames)
            mod.primitives[name] = iob
            # constraints
            pos = chipdb.loc2pin_name(db, dbrow, dbcol)
            # NOTE(review): bank is unused since the iostd code below was disabled
            bank = chipdb.loc2bank(db, dbrow, dbcol)
            cst.ports[name] = f"{pos}{idx}"
            if kind[0:5] == 'TLVDS':
                # true lvds occupies the A/B pin pair
                cst.ports[name] = f"{pos}{idx},{pos}{chr(ord(idx) + 1)}"
            #iostd = _banks.get(bank)
            #if iostd:
            #    cst.attrs.setdefault(name, {}).update({"IO_TYPE" : iostd})
            for flg in flags:
                name_val = flg.split('=')
                cst.attrs.setdefault(name, {}).update({name_val[0]: name_val[1]})

    # global power nets
    mod.assigns.append(("VCC", "1'b1"))
    mod.assigns.append(("VSS", "1'b0"))
def default_device_config():
    """Return the default bitstream/device configuration flag dictionary."""
    # dual-purpose pins: all reclaimed as regular IO off by default
    cfg = {f"{pin} regular_io": "false"
           for pin in ("JTAG", "SSPI", "MSPI", "READY", "DONE", "RECONFIG_N", "MODE")}
    cfg.update({
        "CRC_check": "true",
        "compress": "false",
        "encryption": "false",
        "security_bit_enable": "true",
        "bsram_init_fuse_print": "true",
        "download_speed": "250/100",
        "spi_flash_address": "0x00FFF000",
        "format": "txt",
        "background_programming": "false",
        "secure_mode": "false",
    })
    return cfg
def fix_pll_ports(pll):
    """Collapse scalar ports (e.g. PSDA0..PSDA3) into list-valued bus ports.

    Fix: the loop variable was named `portname`, shadowing the module-level
    portname() function; renamed to `bus`.
    """
    for bus, width in [('PSDA', 4), ('DUTYDA', 4), ('FDLY', 4),
                       ('FBDSEL', 6), ('IDSEL', 6), ('ODSEL', 6)]:
        for n in range(width):
            scalar = f'{bus}{n}'
            if scalar in pll.portmap:
                # move the scalar wire into the bus list, preserving order
                pll.portmap.setdefault(bus, []).append(pll.portmap.pop(scalar))
def fix_plls(db, mod):
    """Post-process every PLL primitive in mod.

    Drops PLLs whose fuse image lacks INSEL (not real instances), then
    prunes unused ports, resolves inputs, registers wires and collapses
    scalar ports into buses.
    """
    plls = [item for item in mod.primitives.items() if item[1].typ in {'rPLL', 'PLLVR'}]
    for pll_name, pll in plls:
        if 'INSEL' not in pll.params:
            del mod.primitives[pll_name]
            continue
        disable_unused_pll_ports(pll)
        modify_pll_inputs(db, pll)
        # register the scalar wire names before they are folded into buses
        mod.wires.update(pll.portmap.values())
        fix_pll_ports(pll)
def main():
    """Unpack a Gowin bitstream into Verilog (and optionally constraints)."""
    parser = argparse.ArgumentParser(description='Unpack Gowin bitstream')
    parser.add_argument('bitstream')
    parser.add_argument('-d', '--device', required=True)
    parser.add_argument('-o', '--output', default='unpack.v')
    parser.add_argument('-s', '--cst', default=None)
    parser.add_argument('--noalu', action = 'store_true')
    args = parser.parse_args()

    global _device
    _device = args.device
    # For tool integration it is allowed to pass a full part number
    m = re.match("GW1N(S?)[A-Z]*-(LV|UV|UX)([0-9])C?([A-Z]{2}[0-9]+P?)(C[0-9]/I[0-9])", _device)
    if m:
        mods = m.group(1)
        luts = m.group(3)
        _device = f"GW1N{mods}-{luts}"

    # NOTE(review): the pickle name uses args.device (possibly a full part
    # number) while _device may have just been shortened -- verify
    with importlib.resources.path('apycula', f'{args.device}.pickle') as path:
        with closing(gzip.open(path, 'rb')) as f:
            db = pickle.load(f)
    global _pinout
    # _packages: module-global map device -> default package name
    _pinout = db.pinout[_device][_packages[_device]]
    bitmap = read_bitstream(args.bitstream)[0]
    bm = chipdb.tile_bitmap(db, bitmap)

    mod = codegen.Module()
    cst = codegen.Constraints()

    # XXX this PLLs have empty main cell
    if _device in {'GW1N-9C', 'GW1N-9'}:
        bm_pll = chipdb.tile_bitmap(db, bitmap, empty = True)
        bm[(9, 0)] = bm_pll[(9, 0)]
        bm[(9, 46)] = bm_pll[(9, 46)]
    if _device in {'GW2A-18', 'GW2A-18C'}:
        bm_pll = chipdb.tile_bitmap(db, bitmap, empty = True)
        bm[(9, 0)] = bm_pll[(9, 0)]
        bm[(9, 55)] = bm_pll[(9, 55)]
        bm[(45, 0)] = bm_pll[(45, 0)]
        bm[(45, 55)] = bm_pll[(45, 55)]

    # global wire aliases become plain assigns
    for (drow, dcol, dname), (srow, scol, sname) in db.aliases.items():
        src = f"R{srow+1}C{scol+1}_{sname}"
        dest = f"R{drow+1}C{dcol+1}_{dname}"
        mod.wires.update({src, dest})
        mod.assigns.append((dest, src))

    # banks first: need to know iostandards
    for pos in db.corners.keys():
        row, col = pos
        try:
            t = bm[(row, col)]
        except KeyError:
            continue
        bels, pips, clock_pips = parse_tile_(db, row, col, t)
        #print("bels:", bels)
        tile2verilog(row, col, bels, pips, clock_pips, mod, cst, db)

    for idx, t in bm.items():
        row, col = idx
        # skip banks & dual pins (already handled above)
        if (row, col) in db.corners:
            continue
        #for bitrow in t:
        #    print(*bitrow, sep='')
        #if idx == (5, 0):
        #    from fuse_h4x import *
        #    fse = readFse(open("/home/pepijn/bin/gowin/IDE/share/device/GW1N-1/GW1N-1.fse", 'rb'))
        #    breakpoint()
        bels, pips, clock_pips = parse_tile_(db, row, col, t, noiostd = False)
        #print("bels:", idx, bels)
        #print(pips)
        #print(clock_pips)
        # keep either the ALU view or the LUT view of each slice, not both
        if args.noalu:
            removeALUs(bels)
        else:
            removeLUTs(bels)
        ram16_remove_bels(bels)
        tile2verilog(row, col, bels, pips, clock_pips, mod, cst, db)

    fix_plls(db, mod)

    with open(args.output, 'w') as f:
        mod.write(f)
    if args.cst:
        with open(args.cst, 'w') as f:
            cst.write(f)

if __name__ == "__main__":
    main()
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/gowin_unpack.py
|
gowin_unpack.py
|
import sys
import importlib.resources
import pickle
import gzip
import argparse
import re
from contextlib import contextmanager, closing
from collections import Counter
from apycula import chipdb
class Bba(object):
    """Writer for nextpnr's textual bba (binary blob assembler) format."""
    def __init__(self, file):
        # file: writable text stream the bba commands are emitted to
        self.file = file
        # per-prefix counters for generating unique block labels
        self.block_idx = Counter()

    def __getattr__(self, attr):
        # Any unknown attribute acts as a one-argument bba command writer:
        # e.g. b.u16(5) emits the line "u16 5".
        def write_value(val):
            self.file.write(f"{attr} {val}\n")
        return write_value

    def str(self, val, sep="|"):
        # emit a delimited string record; sep must not occur in val
        self.file.write(f"str {sep}{val}{sep}\n")

    @contextmanager
    def block(self, prefix="block"):
        # Open a labeled push/pop region; yields the generated label so the
        # caller can b.ref() it after the region closes.
        idx = self.block_idx[prefix]
        self.block_idx.update([prefix])
        name = f"{prefix}_{idx}"
        self.push(name)
        self.label(name)
        try:
            yield name
        finally:
            self.pop(name)
# Identifier tables: constids holds the pre-defined nextpnr constant ids
# (index 0 is reserved for the empty string); ids collects every other
# string discovered while writing the database.
constids = ['']
ids = []
def id_string(s):
    """Return the numeric id of string s.

    Constant ids come first; unknown strings are appended to the global
    ids list and numbered after the constants.
    """
    if s in constids:
        return constids.index(s)
    if s not in ids:
        ids.append(s)
    return len(constids) + ids.index(s)
def id_strings(b):
    """Write the collected string table followed by its counts and a ref."""
    with b.block('idstrings') as blk:
        for text in ids:
            b.str(text)
    b.u16(len(constids))
    b.u16(len(ids))
    b.ref(blk)
# forbidden wires. prevent nextpnr from using wires with unclear purpose for routing
# (NOTE(review): the name keeps its historical spelling "forbiden")
_forbiden_wires = {
    'UNK105', 'UNK106', 'UNK107', 'UNK108', 'UNK109', 'UNK110', 'UNK111',
    'UNK112', 'UNK113', 'UNK114', 'UNK115', 'UNK116', 'UNK117', 'UNK118',
    'UNK119', 'UNK120',
    'UNK129', 'UNK130', 'UNK131', 'UNK132', 'UNK133', 'UNK134', 'UNK135',
    'UNK136', 'UNK137', 'UNK138', 'UNK139', 'UNK140', 'UNK141', 'UNK142',
    'UNK143', 'UNK144', 'UNK145', 'UNK146', 'UNK147', 'UNK148', 'UNK149',
    'UNK150', 'UNK151', 'UNK152',
    # the purpose of these is known: they are the outputs of the central MUX for
    # clocks #6 and #7, but the mechanism of their activation is unclear so forbid
    'P16A', 'P26A', 'P36A', 'P46A', 'P17A', 'P27A', 'P37A', 'P47A',
}
def write_pips(b, pips):
    """Write a pip block: (dest, src) id pairs, then the pair count and ref."""
    num = 0
    with b.block("pips") as blk:
        for dest, srcs in pips.items():
            for src in srcs:
                # skip routing through wires with unknown purpose
                if src in _forbiden_wires or dest in _forbiden_wires:
                    continue
                num += 1
                b.u16(id_string(dest))
                b.u16(id_string(src))
    b.u32(num)
    b.ref(blk)
def write_bels(b, bels):
    """Write the tile's bels: type id + portmap block per bel, then counts."""
    with b.block("bels") as blk:
        for typ, bel in bels.items():
            if bel.simplified_iob:
                # simplified IOBs get an 'S'-suffixed type name
                b.u16(id_string(f'{typ}S'))
            else:
                b.u16(id_string(typ))
            with b.block("portmap") as port_blk:
                for dest, src in bel.portmap.items():
                    b.u16(id_string(dest))
                    b.u16(id_string(src))
            b.u16(len(bel.portmap))
            b.ref(port_blk)
    b.u32(len(bels))
    b.ref(blk)
def write_aliases(b, aliases):
    """Write tile-local wire aliases as (dest, src) id pairs plus count/ref."""
    with b.block('aliases') as blk:
        for dest, src in aliases.items():
            b.u16(id_string(dest))
            b.u16(id_string(src))
    b.u32(len(aliases))
    b.ref(blk)
def write_tile(b, tile):
    """Write one tile (bels, pips, clock pips, aliases); return its block label."""
    with b.block('tile') as blk:
        write_bels(b, tile.bels)
        write_pips(b, tile.pips)
        write_pips(b, tile.clock_pips)
        write_aliases(b, tile.aliases)
    return blk
def write_grid(b, grid):
    """Write the tile grid, deduplicating identical tile objects by identity."""
    tiles = {}
    with b.block('grid') as grid_block:
        for row in grid:
            for tile in row:
                if id(tile) in tiles:
                    # already emitted: just reference the existing block
                    b.ref(tiles[id(tile)])
                else:
                    blk = write_tile(b, tile)
                    tiles[id(tile)] = blk
                    b.ref(blk)
    b.ref(grid_block)
def write_global_aliases(b, db):
    """Write cross-tile wire aliases, sorted for a stable binary output."""
    num = 0
    with b.block('aliases') as blk:
        aliases = sorted(db.aliases.items(),
                         key=lambda i: (i[0][0], i[0][1], id_string(i[0][2])))
        for (drow, dcol, dest), (srow, scol, src) in aliases:
            if src in _forbiden_wires or dest in _forbiden_wires:
                continue
            num += 1
            b.u16(drow)
            b.u16(dcol)
            b.u16(id_string(dest))
            b.u16(srow)
            b.u16(scol)
            b.u16(id_string(src))
    b.u32(num)
    b.ref(blk)
def write_timing(b, timing):
    """Write timing data: speed -> group -> type -> values (in picoseconds)."""
    with b.block('timing') as blk:
        for speed, groups in timing.items():
            b.u32(id_string(speed))
            with b.block('timing_group') as tg:
                for group, types in groups.items():
                    b.u32(id_string(group))
                    with b.block('timing_types') as tt:
                        for name, items in types.items():
                            try:
                                # only sequence-valued entries are emitted
                                items[0] # QUACKING THE DUCK
                                b.u32(id_string(name))
                                for item in items:
                                    # values stored in ns; emit integer ps
                                    b.u32(int(item*1000))
                            except TypeError:
                                pass
                    b.u32(len(types))
                    b.ref(tt)
            b.u32(len(groups))
            b.ref(tg)
    b.u32(len(timing))
    b.ref(blk)
def write_partnumber_packages(b, db):
    """Write the partnumber -> (package, device, speed) table."""
    with b.block("partnumber_packages") as blk:
        for partnumber, pkg_rec in db.packages.items():
            pkg, device, speed = pkg_rec
            b.u32(id_string(partnumber))
            b.u32(id_string(pkg))
            b.u32(id_string(device))
            b.u32(id_string(speed))
    b.u32(len(db.packages))
    b.ref(blk)
# Edge-relative pin name: side (T/B/R/L), 1-based number, bel letter.
pin_re = re.compile(r"IO([TBRL])(\d+)([A-Z])")
def iob2bel(db, name):
    """Translate an edge-relative IO name such as 'IOT3A' into its grid BEL
    name ('R1C3_IOBA').

    The side letter selects the grid edge, the number indexes along it
    (1-based), and the trailing letter picks the IOB within the tile."""
    side, num, pin = re.match(r"IO([TBRL])(\d+)([A-Z])", name).groups()
    edge_locs = {
        'T': [(1, c) for c in range(1, db.cols)],
        'B': [(db.rows, c) for c in range(1, db.cols)],
        'L': [(r, 1) for r in range(1, db.rows)],
        'R': [(r, db.cols) for r in range(1, db.rows)],
    }
    row, col = edge_locs[side][int(num) - 1]
    return f"R{row}C{col}_IOB{pin}"
def write_pinout(b, db):
    """Serialize the pinout tables: device -> package -> pin number ->
    (BEL location, list of CFG roles)."""
    with b.block("variants") as var_blk:
        for device, packages in db.pinout.items():
            b.u32(id_string(device))
            with b.block("packages") as pkg_blk:
                for pkg, pins in packages.items():
                    b.u32(id_string(pkg))
                    with b.block("pins") as pin_blk:
                        for pin_num, (loc, cfgs) in pins.items():
                            b.u16(id_string(pin_num))
                            # Store the grid BEL name rather than the raw IO name.
                            b.u16(id_string(iob2bel(db, loc)))
                            with b.block("cfgs") as cfg_blk:
                                for cfg in cfgs:
                                    b.u32(id_string(cfg))
                            b.u32(len(cfgs))
                            b.ref(cfg_blk)
                    b.u32(len(pins))
                    b.ref(pin_blk)
            b.u32(len(packages))
            b.ref(pkg_blk)
    b.u32(len(db.pinout))
    b.ref(var_blk)
def write_chipdb(db, f, device):
    """Write the complete nextpnr chip database for *device* to stream *f*."""
    # '-' is not valid in a C identifier, so sanitize the symbol name.
    safe_dev = device.replace('-', '_')
    bba = Bba(f)
    for header in ('#include "nextpnr.h"',
                   '#include "embed.h"',
                   'NEXTPNR_NAMESPACE_BEGIN'):
        bba.pre(header)
    with bba.block(f'chipdb_{safe_dev}') as root:
        bba.str(device)
        bba.u32(2) # version
        bba.u16(db.rows)
        bba.u16(db.cols)
        write_grid(bba, db.grid)
        write_global_aliases(bba, db)
        write_timing(bba, db.timing)
        write_partnumber_packages(bba, db)
        write_pinout(bba, db)
        id_strings(bba)
    bba.post(f'EmbeddedFile chipdb_file_{safe_dev}("gowin/chipdb-{device}.bin", {root});')
    bba.post('NEXTPNR_NAMESPACE_END')
def read_constids(f):
    # Collect the arguments of X(...) macro lines from a constids header file
    # into the module-level `constids` list (consumed later by id_string /
    # id_strings).
    xre = re.compile(r"X\((.*)\)")
    for line in f:
        m = xre.match(line)
        if m:
            constids.append(m.group(1))
    # NOTE(review): `ids` is presumably a module-level table defined above this
    # chunk — verify it exists; otherwise this raises NameError. The only
    # visible caller (main) ignores the return value.
    return ids
def main():
    """CLI entry point: read constids, load the packaged chip DB, write the BBA."""
    ap = argparse.ArgumentParser(description='Make Gowin BBA')
    ap.add_argument('-d', '--device', required=True)
    ap.add_argument('-i', '--constids', type=argparse.FileType('r'), default=sys.stdin)
    ap.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout)
    opts = ap.parse_args()
    read_constids(opts.constids)
    # The device database ships as a gzip-compressed pickle inside the package.
    with importlib.resources.path('apycula', f'{opts.device}.pickle') as db_path, \
         closing(gzip.open(db_path, 'rb')) as db_file:
        db = pickle.load(db_file)
    write_chipdb(db, opts.output, opts.device)
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/gowin_bba.py
|
gowin_bba.py
|
import sys
import re
import argparse
def main():
    """CLI tool: search rPLL/PLLVR divider settings that best hit a requested
    output frequency and optionally emit a Verilog wrapper module.

    Exits 1 when the device is unknown; prints nothing when no divider
    combination satisfies the device's PFD/VCO/CLKOUT limits.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-i", "--input-freq-mhz", help="PLL Input Frequency", type=float, default=27
    )
    parser.add_argument(
        "-o", "--output-freq-mhz", help="PLL Output Frequency", type=float, default=108
    )
    parser.add_argument("-d", "--device", help="Device", type=str, default="GW1NR-9 C6/I5")
    parser.add_argument(
        "-f",
        "--filename",
        help="Save PLL configuration as Verilog to file",
        type=str,
        default=None,
    )
    parser.add_argument(
        "-m",
        "--module-name",
        help="Specify different Verilog module name than the default 'pll'",
        type=str,
        default="pll",
    )
    parser.add_argument("-l", "--list-devices", help="list device", action="store_true")
    args = parser.parse_args()

    # Accept full part numbers (e.g. "GW1NR-LV9QN88PC6/I5") and normalize them
    # to the "<family>-<size> <speed>" form used as device_limits keys.
    device_name = args.device
    match = re.search(
        r"(GW1[A-Z]{1,3})-[A-Z]{1,2}([0-9])[A-Z]{1,3}[0-9]{1,3}P*(C[0-9]/I[0-9])",
        device_name,
    )
    if match:
        device_name = f"{match.group(1)}-{match.group(2)} {match.group(3)}"

    # Frequency limits in MHz per device/speed grade.
    device_limits = {
        "GW1N-1 C6/I5": {
            "comment": "Untested",
            "pll_name": "rPLL",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 900,
            "clkout_min": 3.125,
            "clkout_max": 450,
        },
        "GW1N-1 C5/I4": {
            "comment": "Untested",
            "pll_name": "rPLL",
            "pfd_min": 3,
            "pfd_max": 320,
            "vco_min": 320,
            "vco_max": 720,
            "clkout_min": 2.5,
            "clkout_max": 360,
        },
        "GW1NR-2 C7/I6": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 800,
            "clkout_min": 3.125,
            "clkout_max": 750,
        },
        "GW1NR-2 C6/I5": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 800,
            "clkout_min": 3.125,
            "clkout_max": 750,
        },
        "GW1NR-2 C5/I4": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 320,
            "vco_min": 320,
            "vco_max": 640,
            "clkout_min": 2.5,
            "clkout_max": 640,
        },
        "GW1NR-4 C6/I5": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 1000,
            "clkout_min": 3.125,
            "clkout_max": 500,
        },
        "GW1NR-4 C5/I4": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 320,
            "vco_min": 320,
            "vco_max": 800,
            "clkout_min": 2.5,
            "clkout_max": 400,
        },
        "GW1NSR-4 C7/I6": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 1200,
            "clkout_min": 3.125,
            "clkout_max": 600,
        },
        "GW1NSR-4 C6/I5": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 1200,
            "clkout_min": 3.125,
            "clkout_max": 600,
        },
        "GW1NSR-4 C5/I4": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 320,
            "vco_min": 320,
            "vco_max": 960,
            "clkout_min": 2.5,
            "clkout_max": 480,
        },
        "GW1NSR-4C C7/I6": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 1200,
            "clkout_min": 3.125,
            "clkout_max": 600,
        },
        "GW1NSR-4C C6/I5": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 1200,
            "clkout_min": 3.125,
            "clkout_max": 600,
        },
        "GW1NSR-4C C5/I4": {
            "comment": "Untested",
            "pll_name": "PLLVR",
            "pfd_min": 3,
            "pfd_max": 320,
            "vco_min": 320,
            "vco_max": 960,
            "clkout_min": 2.5,
            "clkout_max": 480,
        },
        "GW1NR-9 C7/I6": {
            "comment": "Untested",
            "pll_name": "rPLL",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 1200,
            "clkout_min": 3.125,
            "clkout_max": 600,
        },
        "GW1NR-9 C6/I5": {
            "comment": "tested on TangNano9K Board",
            "pll_name": "rPLL",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 1200,
            "clkout_min": 3.125,
            "clkout_max": 600,
        },
        "GW1NR-9 C6/I4": {
            "comment": "Untested",
            "pll_name": "rPLL",
            # BUG FIX: was 3200, an obvious typo — vco_min must be below
            # vco_max (960); every other C5/I4-class entry uses 320.
            "vco_min": 320,
            "pfd_min": 3,
            "pfd_max": 320,
            "vco_max": 960,
            "clkout_min": 2.5,
            "clkout_max": 480,
        },
        "GW1NZ-1 C6/I5": {
            "comment": "untested",
            "pll_name": "rPLL",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 800,
            "clkout_min": 3.125,
            "clkout_max": 400,
        },
        "GW2A-18 C8/I7": {
            "comment": "untested",
            "pll_name": "rPLL",
            "pfd_min": 3,
            "pfd_max": 400,
            "vco_min": 400,
            "vco_max": 1000,
            "clkout_min": 3.125,
            "clkout_max": 500,
        },
    }
    if args.list_devices:
        for device in device_limits:
            print(f"{device} - {device_limits[device]['comment']}")
        sys.exit(0)
    if device_name not in device_limits:
        print(f"ERROR: device '{device_name}' not found")
        sys.exit(1)
    limits = device_limits[device_name]

    # Exhaustive search over the divider space for the closest achievable CLKOUT.
    setup = {}
    FCLKIN = args.input_freq_mhz
    # NOTE(review): seeding the best-error with FCLKIN rejects any candidate
    # whose error is >= FCLKIN outright; float("inf") would always report the
    # closest match — confirm before changing, callers may rely on the cutoff.
    min_diff = FCLKIN
    for IDIV_SEL in range(64):
        for FBDIV_SEL in range(64):
            for ODIV_SEL in [2, 4, 8, 16, 32, 48, 64, 80, 96, 112, 128]:
                PFD = FCLKIN / (IDIV_SEL + 1)
                if not (limits["pfd_min"] < PFD < limits["pfd_max"]):
                    continue
                CLKOUT = FCLKIN * (FBDIV_SEL + 1) / (IDIV_SEL + 1)
                if not (limits["clkout_min"] < CLKOUT < limits["clkout_max"]):
                    continue
                VCO = (FCLKIN * (FBDIV_SEL + 1) * ODIV_SEL) / (IDIV_SEL + 1)
                if not (limits["vco_min"] < VCO < limits["vco_max"]):
                    continue
                diff = abs(args.output_freq_mhz - CLKOUT)
                if diff < min_diff:
                    min_diff = diff
                    setup = {
                        "IDIV_SEL": IDIV_SEL,
                        "FBDIV_SEL": FBDIV_SEL,
                        "ODIV_SEL": ODIV_SEL,
                        "PFD": PFD,
                        "CLKOUT": CLKOUT,
                        "VCO": VCO,
                        "ERROR": diff,
                    }
    if setup:
        extra_options = ""
        if limits["pll_name"] == "PLLVR":
            # PLLVR additionally needs its internal regulator enabled.
            extra_options = ".VREN(1'b1),"
        # BUG FIX below (inside the template): the CLKOUT range comment used
        # vco_min and the VCO range comment used clkout_max; each range now
        # shows its own min/max pair.
        pll_v = f"""/**
 * PLL configuration
 *
 * This Verilog module was generated automatically
 * using the gowin-pll tool.
 * Use at your own risk.
 *
 * Target-Device: {device_name}
 * Given input frequency: {args.input_freq_mhz:0.3f} MHz
 * Requested output frequency: {args.output_freq_mhz:0.3f} MHz
 * Achieved output frequency: {setup['CLKOUT']:0.3f} MHz
 */

module {args.module_name}(
        input  clock_in,
        output clock_out,
        output locked
    );

    {limits['pll_name']} #(
        .FCLKIN("{args.input_freq_mhz}"),
        .IDIV_SEL({setup['IDIV_SEL']}), // -> PFD = {setup['PFD']} MHz (range: {limits['pfd_min']}-{limits['pfd_max']} MHz)
        .FBDIV_SEL({setup['FBDIV_SEL']}), // -> CLKOUT = {setup['CLKOUT']} MHz (range: {limits['clkout_min']}-{limits['clkout_max']} MHz)
        .ODIV_SEL({setup['ODIV_SEL']}) // -> VCO = {setup['VCO']} MHz (range: {limits['vco_min']}-{limits['vco_max']} MHz)
    ) pll (.CLKOUTP(), .CLKOUTD(), .CLKOUTD3(), .RESET(1'b0), .RESET_P(1'b0), .CLKFB(1'b0), .FBDSEL(6'b0), .IDSEL(6'b0), .ODSEL(6'b0), .PSDA(4'b0), .DUTYDA(4'b0), .FDLY(4'b0), {extra_options}
        .CLKIN(clock_in), // {args.input_freq_mhz} MHz
        .CLKOUT(clock_out), // {setup['CLKOUT']} MHz
        .LOCK(locked)
    );

endmodule
"""
        if args.filename:
            # BUG FIX: use a context manager so the output file is flushed
            # and closed (the original left the handle open).
            with open(args.filename, "w") as out:
                out.write(pll_v)
        else:
            print(pll_v)
if __name__ == "__main__":
main()
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/gowin_pll.py
|
gowin_pll.py
|
import os
import sys
import json
import struct
tc = 8 # number of timing classes
chunklen = 15552 # length of each class
def to_float(s):
    """Decode four bytes as a native-endian IEEE-754 single-precision float."""
    (value,) = struct.unpack('f', s)
    return value
def float_data(data, paths):
    """Decode four consecutive floats per path name.

    Returns {name: [f0, f1, f2, f3]}; a repeated name accumulates all of its
    groups into one list (setdefault semantics preserved)."""
    res = {}
    for i, name in enumerate(paths):
        values = res.setdefault(name, [])
        for j in range(4):
            off = (i * 4 + j) * 4
            values.append(to_float(data[off:off + 4]))
    return res
def to_int(s):
    """Decode four bytes as a native-endian unsigned 32-bit integer."""
    (value,) = struct.unpack('I', s)
    return value
def int_data(data, paths):
    """Decode one u32 per path name; returns {name: value}."""
    return {name: to_int(data[i * 4:i * 4 + 4]) for i, name in enumerate(paths)}
def parse_lut(data):
    # LUT timing section: per-input propagation to F plus OFX (wide-function
    # mux) paths. Each name maps to four floats (see float_data).
    paths = ['a_f', 'b_f', 'c_f', 'd_f', 'a_ofx', 'b_ofx', 'c_ofx', 'd_ofx', 'm0_ofx0', 'm1_ofx1', 'fx_ofx1']
    return float_data(data, paths)
def parse_alu(data):
    # ALU timing section: input-to-sum and carry-chain paths.
    paths = ['a_f', 'b_f', 'd_f', 'a0_fco', 'b0_fco', 'd0_fco', 'fci_fco', 'fci_f0']
    return float_data(data, paths)
def parse_sram(data):
    # Shadow SRAM (distributed RAM) timing: read-address-to-DO paths and
    # setup/hold pairs relative to CLK. Hex comments are byte offsets into
    # the section.
    paths = [
        'rad0_do', # 0 also unnumbered
        'rad1_do', # 4
        'rad2_do', # 8
        'rad3_do', # 0xc
        'clk_di_set', # 0x10
        'clk_di_hold', # 0x14
        'clk_wre_set', # 0x18
        'clk_wre_hold', # 0x1c
        'clk_wad0_set', # 0x20 also unnumbered
        'clk_wad0_hold', # 0x24 also unnumbered
        'clk_wad1_set', # 0x28
        'clk_wad1_hold', # 0x2c
        'clk_wad2_set', # 0x30
        'clk_wad2_hold', # 0x34
        'clk_wad3_set', # 0x38
        'clk_wad3_hold', # 0x3c
        'clk_do', # 0x40
    ]
    return float_data(data, paths)
def parse_dff(data):
    # DFF timing: setup/hold per control input, split by clock polarity
    # (pos/neg) and by sync/async LSR. Hex comments are byte offsets.
    paths = [
        'di_clksetpos', # 0x0
        'di_clksetneg', # 0x4
        'di_clkholdpos', # 0x8
        'di_clkholdneg', # 0xc
        'ce_clksetpos', # 0x10
        # NOTE(review): 'clkste' below looks like a typo for 'clkset', but the
        # key name is part of the emitted timing dict — confirm no consumer
        # depends on it before renaming.
        'ce_clksteneg', # 0x14
        'ce_clkholdpos', # 0x18
        'ce_clkholdneg', # 0x1c
        'lsr_clksetpos_syn', # 0x20
        'lsr_clksetneg_syn', # 0x24
        'lsr_clkholdpos_syn', # 0x28
        'lsr_clkholdneg_syn', # 0x2c
        'clk_qpos', # 0x30
        'clk_qneg', # 0x34
        'lsr_q', # 0x38
        'lsr_clksetpos_asyn', # 0x3c
        'lsr_clksetneg_asyn', # 0x40
        'lsr_clkholdpos_asyn', # 0x44
        'lsr_clkholdneg_asyn', # 0x48
        'clk_clk', # 0x4c
        'lsr_lsr', # 0x50
    ]
    return float_data(data, paths)
# The four sections below are not decoded yet. Returning None (implicit from
# `pass`) makes read_tm drop them, since it only keeps truthy results.
def parse_dl(data):
    pass
def parse_iddroddr(data):
    pass
def parse_pll(data):
    pass
def parse_dll(data):
    pass
def parse_bram(data):
    """Decode the block-RAM timing section.

    Hex comments are byte offsets into the section; each name maps to four
    floats (see float_data)."""
    paths = [
        'clka_doa', # 0
        'clkb_dob', # 4
        'clkb_do', # 8
        'clk_do', # 0xc
        'clka_reseta_set', # 0x10
        'clka_ocea_set', # 0x14
        'clka_cea_set', # 0x18
        'clka_wrea_set', # 0x1c
        'clka_dia_set', # 0x20
        'clka_di_set', # 0x24
        'clka_ada_set', # 0x28
        'clka_blksel_set', # 0x2c
        'clka_reseta_hold', # 0x30
        'clka_ocea_hold', # 0x34
        'clka_cea_hold', # 0x38
        'clka_wrea_hold', # 0x3c
        'clka_dia_hold', # 0x40
        'clka_di_hold', # 0x44
        'clka_ada_hold', # 0x48
        'clka_blkset_hold', # 0x4c
        'clkb_resetb_set', # 0x50
        'clkb_oceb_set', # 0x54
        'clkb_ceb_set', # 0x58
        # BUG FIX: the trailing comma was missing here, so this entry and the
        # next were silently concatenated into 'clkb_oce_setclkb_wreb_set',
        # shifting every later path's data by 4 bytes.
        'clkb_oce_set', # 0x5c
        'clkb_wreb_set', # 0x60
        'clkb_dib_set', # 0x64
        'clkb_adb_set', # 0x68
        'clkb_blkset_set', # 0x6c
        'clkb_resetb_hold', # 0x70
        'clkb_oceb_hold', # 0x74
        'clkb_ceb_hold', # 0x78
        'clkb_oce_hold', # 0x7c
        'clkb_wreb_hold', # 0x80
        'clkb_dib_hold', # 0x84
        'clkb_adb_hold', # 0x88
        'clkb_blksel_hold', # 0x8c
        'clk_ce_set', # 0x90
        'clk_oce_set', # 0x94
        'clk_reset_set', # 0x98
        'clk_wre_set', # 0x9c
        'clk_ad_set', # 0xa0
        'clk_di_set', # 0xa4
        'clk_blksel_set', # 0a8
        'clk_ce_hold', # 0xac
        'clk_oce_hold', # 0xb0
        'clk_reset_hold', # 0xb4
        'clk_wre_hold', # 0xb8
        'clk_ad_hold', #0xbc
        'clk_di_hold', # 0xc0
        'clk_blksel_hold', # 0xc4
        'clk_reset_set_syn', # 0xc8
        'clk_reset_hold_syn', # 0xcc
        'clka_reseta_set_syn', # 0xd0
        'clka_reseta_hold_syn', # 0xd4
        'clkb_resetb_set_syn', # 0xd8
        'clkb_resetb_hold_syn', # 0xdc
        'clk_clk', # 0xe0
    ]
    return float_data(data, paths)
def parse_dsp(data):
    # DSP timing is not decoded yet; returning None makes read_tm drop it.
    pass
def parse_fanout(data):
    # Fan-out delay adders per wire class, followed by the matching fan-out
    # counts. The int table starts at 0x80 = 8 names * 4 floats * 4 bytes.
    paths = [
        'X0Fan', # 0x00
        'X1Fan', # 0x04
        'SX1Fan', # 0x08
        'X2Fan', # 0x0C
        'X8Fan', # 0x10
        'FFan', # 0x14
        'QFan', # 0x18
        'OFFan', # 0x1c
    ]
    int_paths = [
        'X0FanNum',
        'X1FanNum',
        'SX1FanNum',
        'X2FanNum',
        'X8FanNum',
        'FFanNum',
        'QFanNum',
        'OFFanNum',
    ]
    return {**float_data(data, paths), **int_data(data[0x80:], int_paths)}
# P/S = primary/secondary clock?
# clock path:
# CIB/PIO -> CENT -> SPINE -> TAP -> BRANCH
# CIB in ECP5 = configurable interconnect block
# PIO in ECP5 = programmable IO
def parse_glbsrc(data):
    # Global clock-network segment delays along the distribution path
    # (CENT -> SPINE -> TAP -> BRANCH; see comments above), plus GSR
    # recovery/hold and minimum pulse width.
    paths = [
        'CIB_CENT_PCLK', # 0x00
        'PIO_CENT_PCLK', # 0x04
        'CENT_SPINE_PCLK', # 0x08
        'SPINE_TAP_PCLK', # 0x0c
        'TAP_BRANCH_PCLK', # 0x10
        'BRANCH_PCLK', # 0x14
        'CIB_PIC_INSIDE', # 0x18
        'CIB_CENT_SCLK', # 0x1c
        'PIO_CENT_SCLK', # 0x20
        'CENT_SPINE_SCLK', # 0x24
        'SPINE_TAP_SCLK_0', # 0x28
        'SPINE_TAP_SCLK_1', # 0x2c (getter takes index)
        'TAP_BRANCH_SCLK', # 0x30
        'BRANCH_SCLK', # 0x34
        'GSRREC_SET', # 0x38
        'GSRREC_HLD', # 0x3c
        'GSR_MPW', # 0x40
    ]
    return float_data(data, paths)
# HclkPathDly = 0x8 + 0x0 + 0xc
def parse_hclk(data):
    # HCLK mux delays (see note above: HclkPathDly = 0x8 + 0x0 + 0xc).
    paths = [
        'HclkInMux', # 0x0
        'HclkHbrgMux', # 0x4
        'HclkOutMux', # 0x8
        'HclkDivMux', # 0xc
    ]
    return float_data(data, paths)
def parse_iodelay(data):
    # IODELAY control-input-to-output (DO/DF) timing paths.
    paths = ['GI_DO', 'SDTAP_DO', 'SETN_DO', 'VALUE_DO',
             'SDTAP_DF', 'SETN_DF', 'VALUE_DF']
    return float_data(data, paths)
# Undecoded sections: returning None makes read_tm drop them.
def parse_io(data):
    pass
def parse_iregoreg(data):
    pass
def parse_wire(data):
    # Base routing-wire delays per wire class.
    paths = [
        'X0', # 0x00
        'FX1', # 0x04
        'X2', # 0x08
        'X8', # 0x0C
        'ISB', # 0x10
        'X0CTL', # 0x14
        'X0CLK', # 0x18
        'X0ME', # 0x1C
    ]
    return float_data(data, paths)
offsets = {
0x0: parse_lut,
0xb0: parse_alu,
0x130: parse_sram,
0x240: parse_dff,
0x390: parse_dl,
0x4a0: parse_iddroddr,
0x7cc: parse_pll,
0x81c: parse_dll,
0x8bc: parse_bram,
0xc8c: parse_dsp,
0x381c: parse_fanout,
0x38bc: parse_glbsrc,
0x39cc: parse_hclk,
0x3728: parse_iodelay,
0x3278: parse_io,
0x306c: parse_iregoreg,
0x379c: parse_wire,
}
dspoffsets = {
0x0: 'mult', #DSP
0x410: 'mac', #DSP
0x6b0: 'multadd', #DSP
0xaf0: 'multaddsum', #DSP
0x1300: 'padd', #DSP
0x1560: 'alu45', #DSP
}
def parse_chunk(chunk):
    """Yield (section_name, parsed_result) for every known section offset
    in one timing-class chunk. Section names are derived from the parser
    function names by stripping the 'parse_' prefix."""
    prefix_len = len('parse_')
    for offset, handler in offsets.items():
        yield handler.__name__[prefix_len:], handler(chunk[offset:])
def read_tm(f, device):
    """Read per-speed-class timing chunks from a vendor .tm file.

    Returns {speed_class: {section_name: timing_dict}}; sections whose parser
    returned nothing are omitted. Chunks beyond the known speed classes are
    keyed by their numeric index."""
    family = device.lower()
    if family.startswith("gw1n"):
        chunk_order = [
            "C5/I4", "C5/I4_LV",
            "C6/I5", "C6/I5_LV",
            "ES", "ES_LV",
            "A4", "A4_LV",
            "8", "9", "10", "11",
            "C7/I6", "C7/I6_LV",
        ]
    elif family.startswith("gw2a"):
        chunk_order = [
            "C8/I7", "C8/I7_LV",
            "C7/I6", "C7/I6_LV",
            "A6", "A6_LV",
            "C9/I8", "C9/I8_LV",
        ]
    else:
        raise Exception("unknown family")
    tmdat = {}
    for idx, chunk in enumerate(iter(lambda: f.read(chunklen), b'')):
        label = chunk_order[idx] if idx < len(chunk_order) else str(idx)
        tmdat[label] = {}
        assert len(chunk) == chunklen
        for name, tm in parse_chunk(chunk):
            if tm:
                tmdat[label][name] = tm
    return tmdat
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/tm_h4x.py
|
tm_h4x.py
|
import os
import sys
import json
# Flat extraction script: dump the vendor .dat file for the device named on
# the command line, printing everything and mirroring it into `data`, which
# is written out as <device>.json at the end of the script.
gowinhome = os.getenv("GOWINHOME")
if not gowinhome:
    raise Exception("GOWINHOME not set")
device = sys.argv[1]
with open(f"{gowinhome}/IDE/share/device/{device}/{device}.dat", 'rb') as f:
    d = f.read()
# Nested dict mirroring everything read below; see insap().
data = {}
def insap(path, val):
    """Insert *val* into the global `data` tree at *path* (a list of keys).

    Intermediate dicts are created on demand. Repeated insertions at the same
    path accumulate: a leaf that is already a list gets appended to, and any
    other existing value is promoted to a two-element list.
    """
    ref = data
    for seg in path[:-1]:
        ref = ref.setdefault(seg, {})
    leaf = path[-1]
    if leaf in ref:
        try:
            ref[leaf].append(val)
        # BUG FIX: was a bare `except:` (which would also swallow
        # KeyboardInterrupt and unrelated errors). Only a missing .append —
        # i.e. an existing scalar/tuple leaf — should trigger the promotion.
        except AttributeError:
            ref[leaf] = [ref[leaf], val]
    else:
        ref[leaf] = val
def print_u8(name, pos):
    # Read one byte at pos, record it under `name` via insap, print it, and
    # return the offset just past it.
    v = d[pos]
    insap(name, v)
    print(f'{name} [0x{pos:06x}]: {v} [0x{v:02x}]')
    return pos + 1
def print_u16(name, pos):
    # Read a little-endian u16 at pos; record, print, and advance by 2.
    v = int.from_bytes(d[pos:pos+2], 'little')
    insap(name, v)
    print(f'{name} [0x{pos:06x}]: {v} [0x{v:04x}]')
    return pos + 2
def print_u64(name, pos):
    # Read a little-endian u64 at pos; record, print, and advance by 8.
    v = int.from_bytes(d[pos:pos+8], 'little')
    insap(name, v)
    print(f'{name} [0x{pos:06x}]: {v} [0x{v:016x}]')
    return pos + 8
pos = 0x026060
z = [
int.from_bytes(d[pos + i * 2 : pos + i * 2 + 2], 'little')
for i in range(4)
]
grid_h, grid_w, cc_y, cc_x = z
data['rows'] = grid_h
data['cols'] = grid_w
data['center'] = (cc_y, cc_x)
print(grid_h, grid_w)
print(cc_y, cc_x)
for i in [2, 1, 0]:
print(' ', end='')
for x in range(grid_w):
n = x // 10**i % 10
print(n, end='')
print()
print()
data['grid'] = []
for y in range(150):
if y in range(grid_h):
print(f'{y:3} ', end='')
row = []
data['grid'].append(row)
for x in range(200):
idx = y * 200 + x
pos = 5744 + 4 * idx
a = int.from_bytes(d[pos:pos+4], 'little')
pos = 125744
b = d[pos+idx]
c = {
(0, 0): ' ', # empty
(1, 1): 'I', # I/O
(2, 1): 'L', # LVDS (GW2A* only)
(3, 1): 'R', # routing?
(4, 0): 'c', # CFU, disabled
(4, 1): 'C', # CFU
(5, 1): 'M', # CFU with RAM option
(6, 0): 'b', # blockram padding
(6, 1): 'B', # blockram
(7, 0): 'd', # dsp padding
(7, 1): 'D', # dsp
(8, 0): 'p', # pll padding
(8, 1): 'P', # pll
(9, 1): 'Q', # dll
}[a, b]
if y in range(grid_h) and x in range(grid_w):
row.append(c)
if x == cc_x and y == cc_y:
assert c == 'b'
print('#', end='')
else:
print(f'{c}', end='')
else:
assert c == ' '
if y in range(grid_h):
print()
print()
def print_arr8(name, pos, num, used):
    # Read `num` bytes at pos; record/print only the first `used`, asserting
    # the rest of the fixed-size slot is zero padding.
    arr = list(d[pos:pos+num])
    print(name, hex(pos), arr[:used])
    insap(name, tuple(arr[:used]))
    for i in range(used, num):
        assert arr[i] == 0
    return pos + num
def print_arr16(name, pos, num, used=None):
    # Read `num` signed little-endian s16 values; unused tail must be -1.
    if used is None:
        used = num
    arr = [int.from_bytes(d[pos+i*2:pos+i*2+2], 'little', signed=True) for i in range(num)]
    print(name, hex(pos), arr[:used])
    insap(name, tuple(arr[:used]))
    for i in range(used, num):
        assert arr[i] == -1
    return pos + num * 2
def print_arr32(name, pos, num, used=None):
    # Read `num` signed little-endian s32 values; unused tail must be 0
    # (note: unlike print_arr16, the padding value here is 0, not -1).
    if used is None:
        used = num
    arr = [int.from_bytes(d[pos+i*4:pos+i*4+4], 'little', signed=True) for i in range(num)]
    print(name, hex(pos), arr[:used])
    insap(name, tuple(arr[:used]))
    for i in range(used, num):
        assert arr[i] == 0
    return pos + num * 4
print()
pos = 0xc8
pos = print_u8(['NumLuts'], pos)
pos = print_u8(['NumLutIns'], pos)
for i in range(32):
pos = print_arr16(['LutIns'], pos, 0x1c)
pos = print_arr16(['Luts'], pos, 32)
print()
pos = print_u8(['NumX0s'], pos)
pos = print_u8(['NumX0Ins'], pos)
for i in range(8):
pos = print_arr16(['X0Ins'], pos, 0x1c)
pos = print_arr16(['X0s'], pos, 8)
print()
pos = print_u8(['NumX1s'], pos)
pos = print_u8(['NumX1Ins'], pos)
for i in range(12):
pos = print_arr16(['X1Ins'], pos, 0x14)
pos = print_arr16(['X1s'], pos, 12)
print()
pos = print_u8(['NumX2s'], pos)
pos = print_u8(['NumX2Ins'], pos)
for i in range(32):
pos = print_arr16(['X2Ins'], pos, 0x15)
pos = print_arr16(['X2s'], pos, 32)
print()
pos = print_u8(['NumX8s'], pos)
pos = print_u8(['NumX8Ins'], pos)
for i in range(16):
pos = print_arr16(['X8Ins'], pos, 0x14)
pos = print_arr16(['X8s'], pos, 16)
print()
pos = print_u8(['NumClks'], pos)
pos = print_u8(['NumClkIns'], pos)
for i in range(3):
pos = print_arr16(['ClkIns'], pos, 0x1c)
pos = print_arr16(['Clks'], pos, 3)
print()
pos = print_u8(['NumLsrs'], pos)
pos = print_u8(['NumLsrIns'], pos)
for i in range(3):
pos = print_arr16(['LsrIns'], pos, 0x14)
pos = print_arr16(['Lsrs'], pos, 3)
print()
pos = print_u8(['NumCe'], pos)
pos = print_u8(['NumCeIns'], pos)
for i in range(3):
pos = print_arr16(['CeIns'], pos, 0x14)
pos = print_arr16(['Ces'], pos, 3)
print()
pos = print_u8(['NumSels'], pos)
pos = print_u8(['NumSelIns'], pos)
for i in range(8):
pos = print_arr16(['SelIns'], pos, 9)
pos = print_arr16(['Sels'], pos, 8)
print()
pos = print_u8(['NumX11s'], pos)
pos = print_u8(['NumX11Ins'], pos)
for i in range(8):
pos = print_arr16(['X11Ins'], pos, 1)
pos = print_arr16(['X11s'], pos, 8)
assert pos == 0x166e
pos = 0x026068
print()
pos = print_arr16(['Dqs', 'TA'], pos, 200, grid_w)
pos = print_arr16(['Dqs', 'BA'], pos, 200, grid_w)
pos = print_arr16(['Dqs', 'LA'], pos, 150, grid_h)
pos = print_arr16(['Dqs', 'RA'], pos, 150, grid_h)
pos = print_arr16(['Dqs', 'TB'], pos, 200, grid_w)
pos = print_arr16(['Dqs', 'BB'], pos, 200, grid_w)
pos = print_arr16(['Dqs', 'LB'], pos, 150, grid_h)
pos = print_arr16(['Dqs', 'RB'], pos, 150, grid_h)
print()
pos = print_arr32(['Cfg', 'TA'], pos, 200, grid_w)
pos = print_arr32(['Cfg', 'BA'], pos, 200, grid_w)
pos = print_arr32(['Cfg', 'LA'], pos, 150, grid_h)
pos = print_arr32(['Cfg', 'RA'], pos, 150, grid_h)
pos = print_arr32(['Cfg', 'TB'], pos, 200, grid_w)
pos = print_arr32(['Cfg', 'BB'], pos, 200, grid_w)
pos = print_arr32(['Cfg', 'LB'], pos, 150, grid_h)
pos = print_arr32(['Cfg', 'RB'], pos, 150, grid_h)
pos = print_arr32(['SpecCfg', 'IOL'], pos, 10, 10)
pos = print_arr32(['SpecCfg', 'IOR'], pos, 10, 10)
print()
# NOTE(review): the Dqs/Cfg sections above pass grid_h for the L/R edges and
# use distinct A/B key names; the Bank and X16 sections below pass grid_w for
# every edge and repeat the 'TA'/'BA'/'LA'/'RA' keys for the second group
# (insap then turns those entries into lists). Presumably 'TB'/'BB'/'LB'/'RB'
# and grid_h were intended — verify against the vendor .dat layout before
# changing anything, since the byte positions still advance correctly either
# way (the slot sizes 200/150 are fixed).
pos = print_arr16(['Bank', 'TA'], pos, 200, grid_w)
pos = print_arr16(['Bank', 'BA'], pos, 200, grid_w)
pos = print_arr16(['Bank', 'LA'], pos, 150, grid_w)
pos = print_arr16(['Bank', 'RA'], pos, 150, grid_w)
pos = print_arr16(['Bank', 'TA'], pos, 200, grid_w)
pos = print_arr16(['Bank', 'BA'], pos, 200, grid_w)
pos = print_arr16(['Bank', 'LA'], pos, 150, grid_w)
pos = print_arr16(['Bank', 'RA'], pos, 150, grid_w)
pos = print_arr16(['Bank', 'SpecIOL'], pos, 10, 10)
pos = print_arr16(['Bank', 'SpecIOR'], pos, 10, 10)
print()
pos = print_arr16(['X16', 'TA'], pos, 200, grid_w)
pos = print_arr16(['X16', 'BA'], pos, 200, grid_w)
pos = print_arr16(['X16', 'LA'], pos, 150, grid_w)
pos = print_arr16(['X16', 'RA'], pos, 150, grid_w)
pos = print_arr16(['X16', 'TA'], pos, 200, grid_w)
pos = print_arr16(['X16', 'BA'], pos, 200, grid_w)
pos = print_arr16(['X16', 'LA'], pos, 150, grid_w)
pos = print_arr16(['X16', 'RA'], pos, 150, grid_w)
pos = print_arr16(['X16', 'SpecIOL'], pos, 10, 10)
pos = print_arr16(['X16', 'SpecIOR'], pos, 10, 10)
print()
pos = print_arr8(['TrueLvds', 'TopA'], pos, 200, grid_w)
pos = print_arr8(['TrueLvds', 'BottomA'], pos, 200, grid_w)
pos = print_arr8(['TrueLvds', 'LeftA'], pos, 150, grid_h)
pos = print_arr8(['TrueLvds', 'RightA'], pos, 150, grid_h)
pos = print_arr8(['TrueLvds', 'TopB'], pos, 200, grid_w)
pos = print_arr8(['TrueLvds', 'BottomB'], pos, 200, grid_w)
pos = print_arr8(['TrueLvds', 'LeftB'], pos, 150, grid_h)
pos = print_arr8(['TrueLvds', 'RightB'], pos, 150, grid_h)
pos = print_arr8(['TrueLvds', 'SpecIOL'], pos, 10, 10)
pos = print_arr8(['TrueLvds', 'SpecIOR'], pos, 10, 10)
print()
pos = print_arr32(['Type', 'TopA'], pos, 200, grid_w)
pos = print_arr32(['Type', 'BottomA'], pos, 200, grid_w)
pos = print_arr32(['Type', 'LeftA'], pos, 150, grid_h)
pos = print_arr32(['Type', 'RightA'], pos, 150, grid_h)
pos = print_arr32(['Type', 'TopB'], pos, 200, grid_w)
pos = print_arr32(['Type', 'BottomB'], pos, 200, grid_w)
pos = print_arr32(['Type', 'LeftB'], pos, 150, grid_h)
pos = print_arr32(['Type', 'RightB'], pos, 150, grid_h)
print(hex(pos))
print()
pos = 0x2dee4
for i in range(10):
pos = print_arr8(['SpecIOL', i], pos, 15, 15)
print()
for i in range(10):
pos = print_arr8(['SpecIOR', i], pos, 15, 15)
print(hex(pos))
print()
#print(d[pos:][:0x200].hex())
def print_outs(name, pos, num):
    # Read `num` records of three signed s16 values; record each triple and
    # print only the ones that are not all -1 (the "empty" marker).
    print(f'{name} 0x{pos:06x} [{num}]')
    for i in range(num):
        a = int.from_bytes(d[pos:pos+2], 'little', signed=True)
        b = int.from_bytes(d[pos+2:pos+4], 'little', signed=True)
        c = int.from_bytes(d[pos+4:pos+6], 'little', signed=True)
        insap(name, (a, b, c))
        if a != -1 or b != -1 or c != -1:
            print(f'\t{i:2}: {a}, {b}, {c}')
        pos += 6
    return pos
def print_mult(name, pos, num):
    # Same as print_outs but with four s16 values per record.
    print(f'{name} 0x{pos:06x} [{num}]')
    for i in range(num):
        a = int.from_bytes(d[pos:pos+2], 'little', signed=True)
        b = int.from_bytes(d[pos+2:pos+4], 'little', signed=True)
        c = int.from_bytes(d[pos+4:pos+6], 'little', signed=True)
        e = int.from_bytes(d[pos+6:pos+8], 'little', signed=True)
        insap(name, (a, b, c, e))
        if a != -1 or b != -1 or c != -1 or e != -1:
            print(f'\t{i:2}: {a}, {b}, {c}, {e}')
        pos += 8
    return pos
def print_clkins(name, pos, num):
    # Same as print_outs but with two s16 values per record.
    print(f'{name} 0x{pos:06x} [{num}]')
    for i in range(num):
        a = int.from_bytes(d[pos:pos+2], 'little', signed=True)
        b = int.from_bytes(d[pos+2:pos+4], 'little', signed=True)
        insap(name, (a, b))
        if a != -1 or b != -1:
            print(f'\t{i:2}: {a}, {b}')
        pos += 4
    return pos
pos = 0x4e7fc
print('FS GRID')
for _ in range(grid_h-2):
cur = d[pos:pos+200]
assert not any(cur[grid_w-2:])
pos += 200
print(cur[:grid_w-2].decode())
for _ in range(grid_h-2, 150):
cur = d[pos:pos+200]
assert not any(cur)
pos += 200
print()
assert pos == 0x55d2c
pos = print_u16(['IobufAIn'], pos)
pos = print_u16(['IobufAOut'], pos)
pos = print_u16(['IobufAOE'], pos)
pos = print_u16(['IObufAIO'], pos)
pos = print_u16(['IobufBIn'], pos)
pos = print_u16(['IobufBOut'], pos)
pos = print_u16(['IobufBOE'], pos)
pos = print_u16(['IObufBIO'], pos)
pos = print_arr16(['IobufIns'], pos, 10)
pos = print_arr16(['IobufOuts'], pos, 10)
pos = print_arr16(['IobufOes'], pos, 10)
pos = print_arr16(['IologicAIn'], pos, 0x31)
pos = print_arr16(['IologicAOut'], pos, 0x16)
pos = print_arr16(['IologicBIn'], pos, 0x31)
pos = print_arr16(['IologicBOut'], pos, 0x16)
pos = print_arr16(['BsramIn'], pos, 0x84)
pos = print_arr16(['BsramOut'], pos, 0x48)
pos = print_arr16(['BsramInDlt'], pos, 0x84)
pos = print_arr16(['BsramOutDlt'], pos, 0x48)
pos = print_arr16(['SsramIO'], pos, 0x1c)
pos = print_arr16(['PllIn'], pos, 0x24)
pos = print_arr16(['PllOut'], pos, 0x5)
pos = print_arr16(['PllInDlt'], pos, 0x24)
pos = print_arr16(['PllOutDlt'], pos, 0x5)
pos = print_clkins(['PllClkin'], pos, 6)
pos = print_arr16(['SpecPll0Ins'], pos, 108)
pos = print_arr16(['SpecPll0Outs'], pos, 15)
pos = print_arr16(['SpecPll0Clkin'], pos, 18)
pos = print_arr16(['SpecPll1Ins'], pos, 108)
pos = print_arr16(['SpecPll1Outs'], pos, 15)
pos = print_arr16(['SpecPll1Clkin'], pos, 18)
pos = print_arr16(['DllIn'], pos, 4)
pos = print_arr16(['DllOut'], pos, 9)
pos = print_arr16(['SpecDll0Ins'], pos, 12)
pos = print_arr16(['SpecDll0Outs'], pos, 27)
pos = print_arr16(['SpecDll1Ins'], pos, 12)
pos = print_arr16(['SpecDll1Outs'], pos, 27)
pos = print_mult(['MultIn'], pos, 0x4f)
pos = print_mult(['MultOut'], pos, 0x48)
pos = print_mult(['MultInDlt'], pos, 0x4f)
pos = print_mult(['MultOutDlt'], pos, 0x48)
pos = print_mult(['PaddIn'], pos, 0x4c)
pos = print_mult(['PaddOut'], pos, 0x36)
pos = print_mult(['PaddInDlt'], pos, 0x4c)
pos = print_mult(['PaddOutDlt'], pos, 0x36)
pos = print_clkins(['AluIn'], pos, 0xa9)
pos = print_clkins(['AluOut'], pos, 0x6d)
pos = print_clkins(['AluInDlt'], pos, 0xa9)
pos = print_clkins(['AluOutDlt'], pos, 0x6d)
pos = print_clkins(['MdicIn'], pos, 0x36)
pos = print_clkins(['MdicInDlt'], pos, 0x36)
pos = print_mult(['CtrlIn'], pos, 0xe)
pos = print_mult(['CtrlInDlt'], pos, 0xe)
print()
#print(hex(pos))
print(d[pos:363662].hex())
pos = 363662
#print(hex(pos))
for i in range(320):
pos = print_arr16(['CiuConnection', i], pos, 60)
pos = print_arr16(['CiuFanoutNum'], pos, 320)
for i in range(320):
pos = print_arr16(['CiuBdConnection', i], pos, 60)
pos = print_arr16(['CiuBdFanoutNum'], pos, 320)
for i in range(320):
pos = print_arr16(['CiuCornerConnection', i], pos, 60)
pos = print_arr16(['CiuCornerFanoutNum'], pos, 320)
for i in range(106):
pos = print_arr16(['CmuxInNodes', i], pos, 73)
for i in range(106):
pos = print_arr16(['CmuxIns', i], pos, 3)
print()
pos = print_arr16(['DqsRLoc'], pos, 0x16)
pos = print_arr16(['DqsCLoc'], pos, 0x16)
pos = print_arr16(['JtagIns'], pos, 5)
pos = print_arr16(['JtagOuts'], pos, 11)
pos = print_arr16(['ClksrcIns'], pos, 0x26)
pos = print_arr16(['ClksrcOuts'], pos, 16)
pos = print_outs(['UfbIns'], pos, 0x5a)
pos = print_outs(['UfbOuts'], pos, 0x20) # 20
pos = pos + 4
pos = print_outs(['McuIns'], pos, 0x109)
pos = print_outs(['McuOuts'], pos, 0x174)
pos = print_outs(['EMcuIns'], pos, 0x10e)
pos = print_outs(['EMcuOuts'], pos, 0x13f)
pos = print_outs(['AdcIns'], pos, 0xf)
pos = print_outs(['AdcOuts'], pos, 13)
pos = print_outs(['Usb2PhyIns'], pos, 0x46)
pos = print_outs(['Usb2PhyOuts'], pos, 0x2a)
pos = print_outs(['Eflash128kIns'], pos, 0x39)
pos = print_outs(['Eflash128kOuts'], pos, 0x21)
pos = print_outs(['SpmiIns'], pos, 0x17)
pos = print_outs(['SpmiOuts'], pos, 0x2f)
pos = print_outs(['I3cIns'], pos, 0x26)
pos = print_outs(['I3cOuts'], pos, 0x28)
#assert pos == 0x6fd18
with open(f'{device}.json', 'w') as f:
json.dump(data, f)
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/dat19_h4x.py
|
dat19_h4x.py
|
from os.path import expanduser
from glob import glob
import json
import os
import csv
VeryTrue = 2
# caches
# .CSV index of vendor files {(device, package) : file_name}
_pindef_index = {}
# (device, package) : pins
_pindef_files = {}
def get_package(device, package, special_pins):
    # Return the vendor pin records for (device, package), loading and caching
    # the JSON file on first use. Only records of TYPE 'I/O' are kept.
    #
    # special_pins is three-valued:
    #   False    -> only pins with no CFG role at all;
    #   True     -> CFG-role pins included, except RECONFIG_N / JTAGSEL_N*;
    #   VeryTrue -> every I/O pin, unfiltered.
    #
    # NOTE(review): _pindef_index must already be populated (all_packages does
    # that) before a cache miss here; otherwise the index lookup raises KeyError.
    global _pindef_files
    if (device, package) not in _pindef_files:
        # GOWINHOME is checked only to fail fast with a clear message.
        gowinhome = os.getenv("GOWINHOME")
        if not gowinhome:
            raise Exception("GOWINHOME not set")
        with open(_pindef_index[(device, package)]) as f:
            pins = json.load(f)
        _pindef_files[(device, package)] = [d for d in pins['PIN_DATA'] if d['TYPE'] == 'I/O']
    if special_pins != VeryTrue:
        pins = [pin for pin in _pindef_files[(device, package)]
                if 'CFG' not in pin.keys() or (
                    pin['CFG'] != 'RECONFIG_N' and not pin['CFG'].startswith('JTAGSEL_N'))]
    else:
        pins = _pindef_files[(device, package)]
    if not special_pins:
        return [pin for pin in pins if 'CFG' not in pin.keys()]
    return pins
# {partnumber : (pkg, device, speed)}
def all_packages(device):
    # Build {partnumber: (package, device, speed)} for the given device from
    # the vendor CSV files, and populate the module-level _pindef_index cache
    # that get_package relies on.
    gowinhome = os.getenv("GOWINHOME")
    if not gowinhome:
        raise Exception("GOWINHOME not set")
    # {partnumber: speed} from the vendor device-info table.
    speeds = {}
    with open(f"{gowinhome}/IDE/data/device/device_info.csv", mode='r') as csv_file:
        csv_reader = csv.DictReader(csv_file, fieldnames =
            ["unused_id", "partnumber", "series", "device", "package", "voltage", "speed"])
        for row in csv_reader:
            if row['device'] != device:
                continue
            speeds.update({row['partnumber']: row['speed']})
    global _pindef_index
    # _pindef_index = {}
    res = {}
    with open(f"{gowinhome}/IDE/data/device/device_package.csv", mode='r') as csv_file:
        csv_reader = csv.DictReader(csv_file, fieldnames =
            ["unused_id", "partnumber", "series", "device", "package", "filename"])
        for row in csv_reader:
            if row['device'] != device:
                continue
            res[row['partnumber']] = (row['package'], device, speeds[row['partnumber']])
            # Remember where each (device, package) pin-definition JSON lives.
            _pindef_index[(row['device'], row['package'])] = \
                f"{gowinhome}/IDE/data/device/{row['filename']}"
    return res
def get_pins(device, package, special_pins=False):
    """Group pin indices by bank: {bank: [pin_index, ...]} (all values str)."""
    by_bank = {}
    for pin in get_package(device, package, special_pins):
        by_bank.setdefault(str(pin['BANK']), []).append(str(pin['INDEX']))
    return by_bank
def get_bank_pins(device, package):
    """Map every pin name to its bank number (as str), including special pins."""
    return {pin['NAME']: str(pin['BANK'])
            for pin in get_package(device, package, VeryTrue)}
def get_locs(device, package, special_pins=False):
    """Return the set of pin location names for (device, package)."""
    return {pin['NAME'] for pin in get_package(device, package, special_pins)}
def get_pin_locs(device, package, special_pins=False):
    """Map pin index (str) -> (location name, [CFG roles])."""
    locs = {}
    for pin in get_package(device, package, special_pins):
        roles = pin['CFG'].split('/') if 'CFG' in pin else []
        locs[str(pin['INDEX'])] = (pin['NAME'], roles)
    return locs
def get_clock_locs(device, package):
    """Return (name, *roles) tuples for pins whose CFG roles start with GCLK."""
    clocks = []
    for pin in get_package(device, package, True):
        cfg = pin.get('CFG')
        if cfg is not None and cfg.startswith("GCLK"):
            clocks.append((pin['NAME'], *cfg.split('/')))
    return clocks
# { name : (is_diff, is_true_lvds, is_positive)}
def get_diff_cap_info(device, package, special_pins=False):
    """Return {pin_name: (is_diff, is_true_lvds, is_positive)}.

    Differential pins are only reported when both halves of the pair are
    present (a pair whose complementary pin is forbidden is dropped).
    """
    df = get_package(device, package, special_pins)
    res = {}
    # If one pin of the pair is forbidden for the diff IO,
    # we can determine this only after we read the data of all pairs
    positive = {}
    negative = {}
    for pin in df:
        is_diff = 'DIFF' in pin.keys()
        if not is_diff:
            # BUG FIX: is_true_lvds was read here before ever being assigned,
            # raising NameError on the first non-diff pin (and using a stale
            # value afterwards). A non-differential pin is never true-LVDS.
            res[str(pin['NAME'])] = (False, False, False)
            continue
        is_true_lvds = 'TRUELVDS' in pin.keys()
        if pin['DIFF'] == 'P':
            # Remember the pair name so the complementary pin can be checked.
            positive[str(pin['NAME'])] = (is_diff, is_true_lvds, True, str(pin['PAIR']))
        else:
            negative[str(pin['NAME'])] = (is_diff, is_true_lvds, False)
    # Keep only complete pairs.
    for pos_name, pos_flags in positive.items():
        neg_name = pos_flags[-1]
        if neg_name in negative:
            res[pos_name] = pos_flags[:-1]
            res[neg_name] = negative[neg_name]
    return res
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/pindef.py
|
pindef.py
|
from multiprocessing.dummy import Pool
import pickle
import json
import re
from apycula import tiled_fuzzer
from apycula import codegen
from apycula import pindef
from apycula import chipdb
from apycula import fuse_h4x
from apycula import gowin_unpack
from apycula.wirenames import clknumbers
def dff(mod, cst, row, col, clk=None):
    """Add a DFF primitive at (row, col); return the name of its clock wire.

    When *clk* is given it is used as the clock wire, otherwise a fresh
    wire name derived from the primitive name is created.
    """
    name = tiled_fuzzer.make_name("DFF", "DFF")
    prim = codegen.Primitive("DFF", name)
    clk_wire = clk if clk else name + "_CLK"
    prim.portmap['CLK'] = clk_wire
    prim.portmap['D'] = name + "_D"
    prim.portmap['Q'] = name + "_Q"
    mod.wires.update(prim.portmap.values())
    mod.primitives[name] = prim
    cst.cells[name] = (row, col, 0, 'A')  # f"R{row}C{col}"
    return clk_wire
def ibuf(mod, cst, loc, clk=None):
    """Add an IBUF primitive constrained to *loc*; return its output wire name.

    When *clk* is given it is used as the output wire, otherwise a fresh
    wire name derived from the primitive name is created.
    """
    name = tiled_fuzzer.make_name("IOB", "IBUF")
    buf = codegen.Primitive("IBUF", name)
    out_wire = clk if clk else name + "_O"
    buf.portmap["I"] = name + "_I"
    buf.portmap["O"] = out_wire
    mod.wires.update([out_wire])
    mod.inputs.update([buf.portmap["I"]])
    mod.primitives[name] = buf
    cst.ports[name] = loc
    return out_wire
# --- module-level setup: load device databases and derive clock pin list ---

# Vendor fuse database for the target device.
with open(f"{tiled_fuzzer.gowinhome}/IDE/share/device/{tiled_fuzzer.device}/{tiled_fuzzer.device}.fse", 'rb') as f:
    fse = fuse_h4x.readFse(f)
# Per-device JSON data produced earlier in the fuzzing flow.
with open(f"{tiled_fuzzer.device}.json") as f:
    dat = json.load(f)
# Chip database produced by the stage-1 fuzzer; updated and re-saved below.
with open(f"{tiled_fuzzer.device}_stage1.pickle", 'rb') as f:
    db = pickle.load(f)

# init pindef
pindef.all_packages(tiled_fuzzer.device)
clock_pins = pindef.get_clock_locs(
        tiled_fuzzer.device,
        tiled_fuzzer.params['package'])
# pins appear to be differential with T/C denoting true/complementary
true_pins = [p[0] for p in clock_pins if "GCLKT" in p[1]]

# thread pool used to run many PnR jobs concurrently
pool = Pool()
def quadrants():
    """Discover the clock quadrants of the chip.

    Places a DFF at points along the chip edges, runs PnR for each, and
    diffs the resulting bitstream against a DFF-less reference run.  The
    diff reveals which center tile (type 80..89) and which tap row tile
    (types 13-16/18/19) each location activates.

    Returns {center_tile_loc: (rows, cols, tap_row)} where rows/cols are
    0-indexed sets of grid coordinates served by that center tile.
    """
    # Reference run: just the clock ibuf, no DFF.
    mod = codegen.Module()
    cst = codegen.Constraints()
    ibuf(mod, cst, true_pins[2], clk="myclk")
    pnr = tiled_fuzzer.run_pnr(mod, cst, {})

    modules = []
    constrs = []
    idxes = []
    # Sweep along the top/bottom edges...
    for i in range(2, db.cols):
        for j in [2, db.rows-3]:  # avoid bram
            if "DFF0" not in db.grid[j-1][i-1].bels:
                print(i, j)
                continue
            mod = codegen.Module()
            cst = codegen.Constraints()
            ibuf(mod, cst, true_pins[0], clk="myclk")
            dff(mod, cst, j, i, clk="myclk")
            modules.append(mod)
            constrs.append(cst)
            idxes.append((j, i))
    # ...and along the left/right edges.
    for i in [2, db.cols-2]:
        for j in range(2, db.rows):
            if "DFF0" not in db.grid[j-1][i-1].bels:
                print(i, j)
                continue
            mod = codegen.Module()
            cst = codegen.Constraints()
            ibuf(mod, cst, true_pins[0], clk="myclk")
            dff(mod, cst, j, i, clk="myclk")
            modules.append(mod)
            constrs.append(cst)
            idxes.append((j, i))

    pnr_res = pool.map(lambda param: tiled_fuzzer.run_pnr(*param, {}), zip(modules, constrs))

    res = {}
    for (row, col), (mybs, *_) in zip(idxes, pnr_res):
        # XOR against the reference bitmap isolates the clock routing fuses.
        sweep_tiles = fuse_h4x.tile_bitmap(fse, mybs^pnr.bitmap)
        # find which tap was used
        taps = [r for (r, c, typ), t in sweep_tiles.items() if typ in {13, 14, 15, 16, 18, 19}]
        # find which center tile was used
        t8x = [(r, c) for (r, c, typ), t in sweep_tiles.items() if typ >= 80 and typ < 90]
        rows, cols, _ = res.setdefault(t8x[0], (set(), set(), taps[0]))
        rows.add(row-1)
        cols.add(col-1)
    return res
def center_muxes(ct, rows, cols):
    "Find which mux drives which spine, and maps their inputs to clock pins"
    # Place one DFF per clock pin on the first usable row of the quadrant.
    fr = min(rows)
    dff_locs = [(fr+1, c+1) for c in cols][:len(true_pins)]

    # Reference run: all ibufs and dffs present but nothing connected.
    mod = codegen.Module()
    cst = codegen.Constraints()
    ibufs = [ibuf(mod, cst, p) for p in true_pins]
    dffs = [dff(mod, cst, row, col) for row, col in dff_locs]
    pnr = tiled_fuzzer.run_pnr(mod, cst, {})

    # Sweep: connect 1, then 2, ... then N clock pins, so each new run
    # activates exactly one additional center mux.
    modules = []
    constrs = []
    for i, pin in enumerate(true_pins):
        mod = codegen.Module()
        cst = codegen.Constraints()
        ibufs = [ibuf(mod, cst, p) for p in true_pins]
        dffs = [dff(mod, cst, row, col) for row, col in dff_locs]
        mod.assigns = list(zip(dffs, ibufs))[:i+1]
        modules.append(mod)
        constrs.append(cst)

    pnr_res = pool.map(lambda param: tiled_fuzzer.run_pnr(*param, {}), zip(modules, constrs))

    gb_sources = {}
    gb_destinations = {}
    src_seen = set()
    dst_seen = set()
    base = pnr.bitmap
    for i, (bs_sweep, *_) in enumerate(pnr_res):
        pin = true_pins[i]
        new = base ^ bs_sweep
        tiles = chipdb.tile_bitmap(db, new)
        try:
            # Decode only the center tile; the single new pip vs. the
            # previous run identifies this pin's mux and spine.
            _, _, clk_pips = gowin_unpack.parse_tile_(db, ct[0], ct[1], tiles[ct], noalias=True)
            dest, = set(clk_pips.keys()) - dst_seen
            dst_seen.add(dest)
            src, = set(clk_pips.values()) - src_seen
            src_seen.add(src)
        except ValueError:
            # it seems this uses a dynamically configurable mux routed to VCC/VSS
            continue
        print(i, pin, src, dest)
        gb_destinations[(ct[1], i)] = dest
        gb_sources[src] = pin
    return gb_sources, gb_destinations
def taps(rows, cols):
    "Find which column is driven by which tap"
    # convert to sorted list of 1-indexed vendor constraints
    rows = [row+1 for row in sorted(rows)]
    cols = [col+1 for col in sorted(cols)]

    modules = []
    constrs = []
    locs = []

    # use a different row for each clock
    # row by row, column by column, hook up the clock to the dff
    # in the old IDE row 1 always used clock 1 and so forth
    for col in cols:
        for gclk, row in enumerate(rows[:len(true_pins)]):
            mod = codegen.Module()
            cst = codegen.Constraints()
            clks = [ibuf(mod, cst, p) for p in true_pins]
            for i, clk in zip(rows, clks):
                flop = dff(mod, cst, i, col)
                # connect clocks 0..gclk, one DFF per row
                if i <= row:
                    mod.assigns.append((flop, clk))
            modules.append(mod)
            constrs.append(cst)
            locs.append((gclk, col-1))

    pnr_res = pool.map(lambda param: tiled_fuzzer.run_pnr(*param, {}), zip(modules, constrs))

    # Per-column "seen" sets: a tap column is attributed to the first gclk
    # run in which its GT00/GT10 pip appears.
    last_dffcol = None
    seen_primary_taps = set()
    seen_secondary_taps = set()
    seen_spines = set()
    clks = {}
    for (gclk, dff_col), (sweep_bs, *_) in zip(locs, pnr_res):
        sweep_tiles = chipdb.tile_bitmap(db, sweep_bs)
        if dff_col != last_dffcol:
            # new DFF column: reset the bookkeeping
            seen_primary_taps = set()
            seen_secondary_taps = set()
            seen_spines = set()
            last_dffcol = dff_col

        tap = None
        print("#"*80)
        print("gclk", gclk, "dff_col", dff_col)
        for loc, tile in sweep_tiles.items():
            row, col = loc
            _, _, clk_pips = gowin_unpack.parse_tile_(db, row, col, tile, noalias=True)
            spines = set(s for s in clk_pips.keys() if s.startswith("SPINE"))
            new_spines = spines - seen_spines
            seen_spines.update(spines)
            print(clk_pips.keys())
            # GT00 = primary tap pip, GT10 = secondary tap pip
            if "GT00" in clk_pips and col not in seen_primary_taps:
                tap = col
                seen_primary_taps.add(col)
            if "GT10" in clk_pips and col not in seen_secondary_taps:
                tap = col
                seen_secondary_taps.add(col)
            print("loc", row, col, "tap", tap, "new spines", new_spines)
        # if tap == None: breakpoint()
        # Result shape: {gclk: {tap_column: {served dff columns}}}
        clks.setdefault(gclk, {}).setdefault(tap, set()).add(dff_col)
        print(clks)

    return clks
# Parse vendor pin names like "IOT10A": side letter (T/B/R/L),
# 1-based position along that side, bel letter.
pin_re = re.compile(r"IO([TBRL])(\d+)([A-Z])")
# Grid coordinates of the IO tiles along each edge of the die,
# indexed by side letter.
banks = {'T': [(0, n) for n in range(db.cols)],
         'B': [(db.rows-1, n) for n in range(db.cols)],
         'L': [(n, 0) for n in range(db.rows)],
         'R': [(n, db.cols-1) for n in range(db.rows)]}
def pin2loc(name):
    """Translate a vendor pin name (e.g. "IOT10A") into ((row, col), bel name)."""
    match = pin_re.match(name)
    side, position, bel = match.groups()
    return banks[side][int(position) - 1], "IOB" + bel
def pin_aliases(quads, srcs):
    """For every quadrant center, alias each mux input to the IOB output wire of the clock pin driving it."""
    aliases = {}
    for ct in quads:
        for mux, pin in srcs.items():
            (row, col), bel = pin2loc(pin)
            out_wire = db.grid[row][col].bels[bel].portmap['O']
            aliases[ct[0], ct[1], mux] = row, col, out_wire
    return aliases
def spine_aliases(quads, dests, clks):
    """Alias each spine destination wire at its tap columns back to the quadrant-center wire.

    Clocks without a recorded destination are skipped, as are
    destinations whose name contains 'UNK' (unknown function).
    """
    aliases = {}
    for ct, quad in quads.items():
        spine_row = quad[2]
        for clk, tap_map in clks[ct].items():
            if (ct[1], clk) not in dests:
                continue
            dest = dests[ct[1], clk]
            if 'UNK' in dest:  # these have an unknown function
                continue
            for tap in tap_map:
                aliases[spine_row, tap, dest] = ct[0], ct[1], dest
    return aliases
# add border cells
def add_rim(rows, cols, spine_row):
    """Extend a quadrant's row/col sets to cover the chip border (rim) tiles.

    Mutates *rows* and *cols* in place and also returns them.
    NOTE(review): indentation was lost in extraction; the nesting below is
    reconstructed from the logic (top rim vs. bottom rim handling) — confirm
    against the upstream clock_fuzzer.py.
    """
    if 1 in rows:
        # quadrant touches the top edge: include row 0
        rows.add(0)
        if tiled_fuzzer.device.startswith("GW1N-9"):
            rows.add(9)
    else:
        # quadrant starts below the spine: include the row just above it
        rows.add(spine_row - 1)
    if max(rows) > spine_row and spine_row != 1:
        # quadrant extends below the spine: include everything down to the
        # bottom edge of the chip
        rows.update({row for row in range(max(rows) + 1, db.rows)})
        if tiled_fuzzer.device.startswith("GW1N-9"):
            rows.add(18)
    if 1 in cols:
        cols.add(0)
    elif db.cols - 2 in cols:
        cols.add(db.cols - 1)
    return rows, cols
def tap_aliases(quads):
    """Alias every GT00/GT10 tap wire in a served column to the same wire in the spine row."""
    aliases = {}
    for rows, cols, spine_row in quads.values():
        add_rim(rows, cols, spine_row)
        for col in cols:
            # skip the chip's center column
            if col == dat['center'][1] - 1:
                continue
            for row in rows:
                if row == spine_row:
                    continue
                for wire in ("GT00", "GT10"):
                    aliases[row, col, wire] = spine_row, col, wire
    return aliases
def branch_aliases(quads, clks):
    """Alias each GB<clk>0 branch wire in the served columns to its tap column's GBO0/GBO1."""
    aliases = {}
    for ct, (rows, _, spine_row) in quads.items():
        for clk, tap_map in clks[ct].items():
            # clocks 0-3 branch from GBO0 (primary), 4-7 from GBO1 (secondary)
            src = "GBO0" if clk < 4 else "GBO1"
            branch_wire = f"GB{clk}0"
            for tap, branch_cols in tap_map.items():
                add_rim(rows, branch_cols, spine_row)
                for row in rows:
                    for col in branch_cols:
                        aliases[row, col, branch_wire] = row, tap, src
    return aliases
def get_bufs_bits(fse, ttyp, win, wout):
    """Look up the fuse bits connecting wire *win* to *wout* in tile type *ttyp* (wire table 38)."""
    src_code = clknumbers[win]
    dst_code = clknumbers[wout]
    record = next((rec for rec in fse[ttyp]['wire'][38]
                   if rec[0] == src_code and rec[1] == dst_code), None)
    fuses = chipdb.unpad(record[2:]) if record is not None else []
    return {fuse_h4x.fuse_lookup(fse, ttyp, f) for f in fuses}
# create aliases and pips for long wires
def make_lw_aliases(fse, dat, db, quads, clks):
    """Record long-wire branch aliases in db.aliases.

    Derives the tap column for each of the 4 long wires from the clock
    tap data, then aliases every column's LB<n>1 branch wire to the
    LBO0/LBO1 wire in its serving tap column, for every row.
    """
    # branches
    # {lw#: {tap_col: {cols}}
    taps = {}
    lw_taps = [-1, -1, -1, -1]
    # any center mux will do: tap columns are the same for all of them
    any_mux = list(clks.keys())[0]
    for gclk in range(4):
        if gclk not in clks[any_mux].keys():
            # XXX
            continue
        lw_taps[gclk] = min(clks[any_mux][gclk].keys())
    if -1 in lw_taps:
        # XXX GW1NZ-1 temporary hack
        if lw_taps.count(-1) != 1:
            raise Exception("Inconsistent clock tap columns, something is went wrong with the clock detection.")
        else:
            # reconstruct the single missing tap column: the four columns
            # are assumed to sum to 0+1+2+3; the extra -1 term cancels the
            # -1 placeholder still present in sum(lw_taps)
            lw_taps[lw_taps.index(-1)] = 0 + 1 + 2 + 3 - 1 - sum(lw_taps)
    print(" lw_taps = ", lw_taps)
    for lw in range(4):
        tap_col = lw_taps[lw]
        for col in range(db.cols):
            # taps repeat every 4 columns; each tap serves up to 2 columns
            # to its right (see doc/longwires.md)
            if (col > tap_col + 2) and (tap_col + 4 < db.cols):
                tap_col += 4
            taps.setdefault(lw, {}).setdefault(tap_col, set()).add(col)
    for row in range(db.rows):
        for lw, tap_desc in taps.items():
            for tap_col, cols in tap_desc.items():
                for col in cols:
                    # LB<lw>1 rides on LBO0, LB<lw+4>1 on LBO1
                    db.aliases.update({(row, col, f'LB{lw}1') : (row, tap_col, f'LBO0')})
                    db.aliases.update({(row, col, f'LB{lw + 4}1') : (row, tap_col, f'LBO1')})
if __name__ == "__main__":
    # `if True:` is a development toggle to skip the (slow) fuzzing step.
    if True:
        quads = quadrants()

        srcs = {}
        dests = {}
        clks = {}
        for ct, (rows, cols, _) in quads.items():
            # I reverse the pins here because
            # the 8th mux is not fuzzed presently
            true_pins.reverse()
            qsrcs, qdests = center_muxes(ct, rows, cols)
            srcs.update(qsrcs)
            dests.update(qdests)
            clks[ct] = taps(rows, cols)

        print(" quads =", quads)
        print(" srcs =", srcs)
        print(" dests =", dests)
        print(" clks =", clks)

        # translate the fuzzing results into chip-db aliases
        pa = pin_aliases(quads, srcs)
        sa = spine_aliases(quads, dests, clks)
        ta = tap_aliases(quads)
        ba = branch_aliases(quads, clks)
        # print(pa)
        # print(sa)
        # print(ta)
        # print(ba)
        db.aliases.update(pa)
        db.aliases.update(sa)
        db.aliases.update(ta)
        db.aliases.update(ba)

        # long wires
        make_lw_aliases(fse, dat, db, quads, clks)

    # persist the augmented chip database for the next fuzzing stage
    with open(f"{tiled_fuzzer.device}_stage2.pickle", 'wb') as f:
        pickle.dump(db, f)
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/apycula/clock_fuzzer.py
|
clock_fuzzer.py
|
# SDRAM
Gowin devices with the R suffix such as the GW1NR-9 have built-in SDRAM.
This SDRAM is a System-in-Package wire-bonded off-the-shelf SDRAM module.
So there isn't so much to fuzz, you just have to know the pinout and the model.
Gowin has been so kind as to provide LiteX with [the details](https://github.com/litex-hub/litex-boards/blob/8a33c2aa312dddc66297f7cd6e39107fda5a2efb/litex_boards/targets/trenz_tec0117.py#L92-L118) of the model and pinout. That is... the magic wire names that result in the vendor placing the IOB in the correct place.
For the open source tools, you can't use the magic wire names. But what you can do is feed the magic wire names to the vendor and look at the generated placement.
This is what has been done in `/legacy/sdram`, which is a standalone script not tied into the rest of Apicula.
The result for GW1NR-9 is as below. A daring adventurer could use these to develop their own SDRAM controller or try to add support for LiteX on open source Gowin tools.
```
IO_sdram_dq(0) -> R29C26_IOA
IO_sdram_dq(1) -> R29C27_IOA
IO_sdram_dq(2) -> R29C35_IOA
IO_sdram_dq(3) -> R29C36_IOA
IO_sdram_dq(4) -> R29C37_IOA
IO_sdram_dq(5) -> R29C38_IOA
IO_sdram_dq(6) -> R29C39_IOA
IO_sdram_dq(7) -> R29C40_IOA
IO_sdram_dq(8) -> R29C16_IOB
IO_sdram_dq(9) -> R29C17_IOB
IO_sdram_dq(10) -> R29C18_IOA
IO_sdram_dq(11) -> R29C18_IOB
IO_sdram_dq(12) -> R29C19_IOB
IO_sdram_dq(13) -> R29C20_IOB
IO_sdram_dq(14) -> R29C21_IOB
IO_sdram_dq(15) -> R29C22_IOB
O_sdram_clk -> R1C4_IOB
O_sdram_cke -> R1C9_IOA
O_sdram_cs_n -> R1C35_IOB
O_sdram_cas_n -> R1C40_IOB
O_sdram_ras_n -> R1C40_IOA
O_sdram_wen_n -> R1C44_IOA
O_sdram_addr(0) -> R1C31_IOA
O_sdram_addr(1) -> R1C28_IOA
O_sdram_addr(2) -> R1C27_IOA
O_sdram_addr(3) -> R1C26_IOA
O_sdram_addr(4) -> R1C22_IOB
O_sdram_addr(5) -> R1C21_IOB
O_sdram_addr(6) -> R1C18_IOB
O_sdram_addr(7) -> R1C18_IOA
O_sdram_addr(8) -> R1C14_IOB
O_sdram_addr(9) -> R1C14_IOA
O_sdram_addr(10) -> R1C31_IOB
O_sdram_addr(11) -> R1C9_IOB
O_sdram_dqm(0) -> R1C44_IOB
O_sdram_dqm(1) -> R1C4_IOA
O_sdram_ba(0) -> R1C35_IOA
O_sdram_ba(1) -> R1C32_IOA
```
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/doc/sdram.md
|
sdram.md
|
# Muxes
Each CFU contains 8 muxes that are used to create primitives like LUT5, LUT6, LUT7, LUT8, MUX4, MUX8, MUX16 and MUX32. These muxes are interconnected in a certain fixed way and always send the result to the outputs of the muxes: OF0-OF7 using the SEL0-7 switching signals.

These fixed links impose restrictions on the possible placement of primitives:
* LUT5, LUT6, MUX4, MUX8 can be placed in one CLS;
* for LUT6, MUX8 CLS index must be even number;
* LUT7, MUX16 can be placed in one cell;
* LUT8, MUX32 occupy two cells.
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/doc/muxes.md
|
muxes.md
|
# Architecture
Gowin FPGAs have a LUT4 architecture common to many smaller FPGA architectures. The FPGA consist of a grid of tiles with I/O buffers around the edges, rows of special-function blocks such as BRAM, and a large grid configurable logic units.

Each Configurable logic unit consists of 8 LUT4s grouped in 4 slices, of which 3 have data flip-flops. Each slice shares certain resources such as clock inputs and reset lines.
Each LUT4 has 4 inputs and one output that is directly connected to the data flip-flop input. The LUT output can be used independently, but the flip-flop is always used through the LUT. Each pair of flip flops has data in and out, clock, clock enable, and set/reset. Each pair of flip-flops can be configured for rising edge or falling edge sensitivity, and asynchronous or synchronous set or clear.
These tiles are connected with various multiplexers to adjacent tiles as well as global clock lines. Each tile has 8 tile-local wires, 4 one-hop wires of which 2 are shared between north/south and east/west, 8 two-hop wires with one-hop taps, and 4 eight-hop wires with four-hop taps. An overview of all wires can be seen below.

There are various inputs to the global clock routing, such as GCLK pins and PLL outputs. These are hardwired to the centre tile of the FPGA. In the centre tile are some muxes that route one of the inputs to a certain spine. There are 4 primary and 4 secondary spines in each direction, running horizontally away from the centre tile.
At every tile along the spine are two fuses that enable a primary/secondary tap that runs along the entire height of the FPGA. Each tile is hardwired to connect to one primary and/or one secondary tap. Each spine can therefore only be tapped at a few columns per side, depending on the size of the FPGA.
Each tile in the FPGA has one fuse that connects the vertical tap that runs through that tile to a branch that runs horizontally across the FPGA. This one fuse controls both primary and secondary taps. In each tile GB0-3 correspond to the 4 primary branches, and GB4-7 to the secondary ones.

The bitstream consist of frames. Frames describe one row of bits on the FPGA tile grid. Frames are padded to full bytes, and verified with a CRC-16 checksum. These rows are stacked on top of each other to describe a bitmap that is overlaid on the FPGA tile grid.
The number of tiles on the grid depend on the specific FPGA model. A tile is roughly 60x24 bits, with I/O buffers and some special tiles being a few rows or columns larger. A common logic tile has the LUTs and flip-flops in the bottom 4 rows, with the top 20 rows being filled with multiplexers. An overview of the bitstream layout of LUTs, flip-flops, and multiplexers in a logic tile can be seen below.

|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/doc/architecture.md
|
architecture.md
|
Long wires are used for fast fanout signals. The receivers can be either CLK, CE, SR or LUT inputs.
You can figure out a lot already from the syntax of the CLOCK_LOC constraint:
CLOCK_LOC "net-name" clocks=fanout [quadrant]
clocks:
- BUFG[0-7] eight master clocks
- BUFS[0-7] eight long lines
- LOCAL_CLOCK "not to route clock line" (???)
fanout:
- CLK
- CE
- SR set/reset/clear/preset
- LOGIC other than the above
the | can be used as OR.
So we have 8 long wires per quandrant, which btw can be set as LEFT (L), RIGHT (R) for GW1N series and as TOPLEFT (TL), TOPRIGHT (TR), BOTTOMLEFT (BT) and BOTTOMRIGHT (BR) for GW1N-9/GW1NR-9/GW1N-9C/GW1NR-9C, GW2A series.
The following description focuses on the GW1N-1 chip, which has two quadrants and therefore only long wires with the letter T (top), the chips with four quadrants will also have long wires with the letter B (bottom).

# A
Here the long wires LWT[0-7] are connected to the SS00 and SS40 buses of the left and right quadrants. The corresponding fuses must be reset to 0 in order to connect the long wire to the bus.
There is no choice to which bus (SS00 or SS40), only connect/disconnect.
The buses consist of spines that are different for the right and left quadrant.
| Bus/Code | Spine/Code | Quadrant |
|:--------:|:---------------:|:--------:|
| SS00 289 | LWSPINETL0 1001 | Left |
| SS00 289 | LWSPINETL1 1002 | Left |
| SS00 289 | LWSPINETL2 1003 | Left |
| SS00 289 | LWSPINETL3 1004 | Left |
| SS40 290 | LWSPINETL4 1005 | Left |
| SS40 290 | LWSPINETL5 1006 | Left |
| SS40 290 | LWSPINETL6 1007 | Left |
| SS40 290 | LWSPINETL7 1008 | Left |
| SS00 289 | LWSPINETR0 1009 | Right |
| SS00 289 | LWSPINETR1 1010 | Right |
| SS00 289 | LWSPINETR2 1011 | Right |
| SS00 289 | LWSPINETR3 1012 | Right |
| SS40 290 | LWSPINETR4 1013 | Right |
| SS40 290 | LWSPINETR5 1014 | Right |
| SS40 290 | LWSPINETR6 1015 | Right |
| SS40 290 | LWSPINETR7 1016 | Right |
Fuses (table 38):
| Long wire/Code | Spine/Code | Fuses/Bits |
|:---------------:|:---------------:|:------------:|
| LWT0 32 | LWSPINETL0 1001 | 1462 (23, 58)|
| LWT1 33 | LWSPINETL1 1002 | 1455 (19, 59)|
| LWT2 34 | LWSPINETL2 1003 | 1450 (14, 59)|
| LWT3 35 | LWSPINETL3 1004 | 1456 (20, 58)|
| LWT4 36 | LWSPINETL4 1005 | 1625 (24, 0) |
| LWT5 37 | LWSPINETL5 1006 | 1446 (10, 59)|
| LWT6 38 | LWSPINETL6 1007 | 1442 (6, 59)|
| LWT7 39 | LWSPINETL7 1008 | 1642 (27, 59)|
| LWT0 32 | LWSPINETR0 1009 | 1462 (24, 59)|
| LWT1 33 | LWSPINETR1 1010 | 1453 (17, 59)|
| LWT2 34 | LWSPINETR2 1011 | 1449 (13, 59)|
| LWT3 35 | LWSPINETR3 1012 | 1461 (22, 59)|
| LWT4 36 | LWSPINETR4 1013 | 1458 (21, 58)|
| LWT5 37 | LWSPINETR5 1014 | 1445 (9, 59)|
| LWT6 38 | LWSPINETR6 1015 | 1441 (5, 59)|
| LWT7 39 | LWSPINETR7 1016 | 1643 (26, 58)|
# B
Connecting the spines with the vertical wires. Here shown is the vertical wire from the `SS40` bus, which is not drawn in the picture of the whole chip because it would result in a mess, but these hidden wires go in pairs with the drawn wires from the `SS00` bus.
Only one spine from each bus is connected, which one is determined from the column of the cell using the table (is specified for the GW1N-1 chip, tables for others can be found in the code):
| Col mod 4 | SS00 spine | SS40 spine |
|:---------:|:----------:|:----------:|
| 0 | 1 | 5 |
| 1 | 0 | 4 |
| 2 | 3 | 7 |
| 3 | 2 | 6 |
Important: taps are easy to determine, but which columns are served by which tap is a little more difficult to determine by the rule:
If there is a tap to the right of the current tap for a given long wire, then the current tap serves two columns to the right, otherwise all columns to the right.
For example GW1N-1, chip width 20 columns, long wire #1, this is how the columns are serviced with taps:
| col# | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 |
|:-------:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
| tap pip | 0 | | | | 1 | | | | 2 | | | | 3 | | | | 4 | | | |
| cells | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 2 | 2 | 2 | 2 | 3 | 3 | 3 | 3 | 4 | 4 | 4 | 4 | 4 |
For long wire #2:
| col# | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 |
|:-------:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
| tap pip | | | | 0 | | | | 1 | | | | 2 | | | | 3 | | | | 4 |
| cells | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 2 | 2 | 2 | 2 | 3 | 3 | 3 | 3 | 4 | 4 |
One more thing quadrants are not squares (horrible): their borders are uneven because the belonging of a cell to one or another quadrant is determined not by its coordinates, but by the coordinates of the tap that serves it.
For example column 9 is central for GW1N-1 so from the table above cells in column 10 belong to the left quadrant for the long wire #1 as column 10 is served by tap in column 8, but the same cell belongs to the right quadrant for the long wire #2 as it is served by tap in column 11.
What the `A6`, `A7`, `B6`, `B7`, `F6`, `F7` inputs are is still unclear.
It is interesting that it is possible to connect the same spine to `LT02` and `LT13`.
# C
Here the vertical taps `LT01` and `LT04` are connected to the horizontal branches `LBO0` and `LBO1`.
As usual the column determines which long wire the branch is connected to, to some extent --- we remember that not only the buses are connected to the taps, but also half a dozen other unknown sources.
And one more thing: the taps are not named `LT02` and `LT13`, which suggests that there is some other mechanism here.
# D
For type 12:
| | LB01 | LB11 | LB21 | LB31 | LB41 | LB51 | LB61 | LB71 |
|:--------:|:----:|:----:|:----:|:----:|:----:|:----:|:----:|:----:|
| X01 56 | | | | | X | X | X | X |
| X02 57 | | | | | X | X | X | X |
| X03 58 | | | | | X | X | X | X |
| X04 59 | | | | | X | X | X | X |
| X05 60 | | | | | X | X | X | X |
| X06 61 | | | | | X | X | X | X |
| X07 62 | | | | | X | X | X | X |
| X08 63 | | | | | X | X | X | X |
| N200 76 | X | | | | | | | |
| N210 77 | | X | | | | | | |
| N220 78 | | | X | | | | | |
| N230 79 | | | | X | | | | |
| N240 80 | | | | | X | | | |
| N250 81 | | | | | | X | | |
| N260 82 | | | | | | | X | |
| N270 83 | | | | | | | | X |
| S200 84 | X | | | | | | | |
| S210 85 | | X | | | | | | |
| S220 86 | | | X | | | | | |
| S230 87 | | | | X | | | | |
| S240 88 | | | | | X | | | |
| S250 89 | | | | | | X | | |
| S260 90 | | | | | | | X | |
| S270 91 | | | | | | | | X |
| E200 92 | X | | | | | | | |
| E210 93 | | X | | | | | | |
| E220 94 | | | X | | | | | |
| E230 95 | | | | X | | | | |
| E240 96 | | | | | X | | | |
| E250 97 | | | | | | X | | |
| E260 98 | | | | | | | X | |
| E270 99 | | | | | | | | X |
| W200 100 | X | | | | | | | |
| W210 101 | | X | | | | | | |
| W220 102 | | | X | | | | | |
| W230 103 | | | | X | | | | |
| W240 104 | | | | | X | | | |
| W250 105 | | | | | | X | | |
| W260 106 | | | | | | | X | |
| W270 107 | | | | | | | | X |
| CLK0 124 | X | X | X | | X | X | X | X |
| CLK1 125 | X | X | X | | X | X | X | X |
| CLK2 126 | X | X | X | | X | X | X | X |
| LSR0 127 | X | X | X | X | | | | X |
| LSR1 128 | X | X | X | X | | | | X |
| LSR2 129 | X | X | X | X | | | | X |
| CE0 130 | X | X | X | X | | | | X |
| CE1 131 | X | X | X | X | | | | X |
| CE2 132 | X | X | X | X | | | | X |
# E
The two cells in the center of the chip with types 81 and 82 are responsible for sources for long wires. Here I also add two adjacent cells with types 83 and 84. 83 contains 7 muxes for lines 40--46 and 84 contains a mux for line 47.
These lines 40--47 are connected to buses `SS00` and `SS40` in the bottom row of the chip. But so far I can't get the vendor IDE to use them so their purpose is unclear.
The `+` marked sources whose locations are known and I will explain how to find them.
| | LW0 | LW1 | LW2 | LW3 | LW4 | LW5 | LW6 | LW7 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 |
|:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
| 121 | X | | | | | | | X | X | | | | | | | X |
| 122 | | X | | | | | X | | | X | | | | | X | |
| 123 | | | X | | | | X | | | | X | | | | X | |
| 124 | | | | X | | | | X | | | | X | | | | X |
| 125 | | | X | | X | | | | | | X | | X | | | |
| 126 | | | | X | | X | | | | | | X | | X | | |
| 127 | | X | | | | X | | | | X | | | | X | | |
| 128 | X | | | | X | | | | X | | | | X | | | |
| 154 | X | X | X | | X | X | X | | X | X | X | | X | X | X | |
| +155 | | X | X | X | | X | X | X | | X | X | X | | X | X | X |
| 156 | X | X | X | | X | X | X | | X | X | X | | X | X | X | |
| +157 | | X | X | X | | X | X | X | | X | X | X | | X | X | X |
| 158 | X | X | | X | X | X | | X | X | X | | X | X | X | | X |
| 159 | X | X | | X | X | X | | X | X | X | | X | X | X | | X |
| 160 | X | | X | X | X | | X | X | X | | X | X | X | | X | X |
| 161 | X | | X | X | X | | X | X | X | | X | X | X | | X | X |
| +162 | X | | | | | X | X | X | X | | | | | X | X | X |
| +163 | | X | X | X | X | | | | | X | X | X | X | | | |
| +164 | X | | X | X | | X | | | X | | X | X | | X | | |
| +165 | | X | | | X | | X | X | | X | | | X | | X | X |
| +166 | X | X | | X | | | X | | X | X | | X | | | X | |
| +167 | | | X | | X | X | | X | | | X | | X | X | | X |
| +168 | X | X | X | | | | | X | X | X | X | | | | | X |
| +169 | | | | X | X | X | X | | | | | X | X | X | X | |
The `dat` dictionary has a table `UfbIns`, whose entries have the structure `[row number, column number, pip code]`. Thus the entry `[4, 1, 126]` means that CLK2 pip in cell [4, 1] is an entry point to somewhere.
Experimentally it is found that CLK2 are the entry points to the long wire system and it is these points that are marked with a `+` sign in the table above.
The record number in the table `UfbIns` is as the code from the table above minus 104.
# F
This cell contains a connection to the `SS00` and `SS40` buses of some obscure wires. They cannot be made to be used even if you place all the primitives literally next to the bottom row - the buses in the top row will still be used.
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/doc/longwires.md
|
longwires.md
|
# Command structure
## Config Frames
`0x3B` looks like the “load config” command (similar to `LSC_PROG_INCR_RTI` for ECP5)
`0x80` CRC enable
`0x02` number of frames MSB
`0xC8` number of frames LSB
Followed by configuration frames (including EBR) (`0x2C8`=712) of them in this case, gw1nr9)
Each frame seems to be:
1. Frame data, with 1s appended at the start to make it an multiple of 8 bits (guess, similar to ECP5)
2. CRC-16 that matches “CRC-16/ARC” algorithm at https://crccalc.com/ but endian swapped (optional)
3. 6x `0xFF` bytes
At end of bitstream 18 more `0xFF` bytes followed by 2 bytes `0x34` `0x73` which are the CRC as above of the last 24 `0xFF` bytes
First CRC is special, like ECP5, as it also covers the commands after the preamble except the flash address.
## Preamble
20 `0xFF` bytes, followed by two bytes “file checksum” (for early Vendor tool release), followed by `0xFF` `0xFF` `0xA5` `0xC3`
For comparison ECP5 preamble is `0xFF` `0xFF` `0xBD` `0xB3`
“File checksum” matches value displayed in programmer, unlikely actually used by hardware
## Other commands
Command always followed by 3 “option” bytes, usually `0x00` except for “load config”, like ECP5 commands
`0x06` (or `0x86` when `bit_crc_check` not set) IDCODE check (similar to ECP5 `VERIFY_ID`)
Followed by 3x `0x00` bytes
Then the JTAG IDCODE
For GW1N-1: IDCODE `0x0900281B`; bytes here `0x09` `0x00` `0x28` `0x1B`
For GW1NR-9: IDCODE `0x1100581B`; bytes here `0x11` `0x00` `0x58` `0x1B`
For GW2AR-18: IDCODE `0x0000081B`; bytes here `0x00` `0x00` `0x08` `0x1B`
`0x10` (or `0x90` when `bit_crc_check` not set)
- [56:24]: unknown `0x00000000` (seems to be same for all devices and configs)
- [23:16]: value depending on `loading_rate` (value to determine) (or 0x00 when N/A)
- [15:14]: unknown `0x0`
- [13] : `1` when `bit_compress` set
- [12] : `1` when `program_done_bypass` set
- [11:0] : unknown `0x000`
`0x51` (or `0xD1` when `bit_crc_check` not set) Compress configuration
- [56:24] : unknown `0x00FFFFFF` (seems to be same for all devices and configs)
- [23:16] : `0xFF` for uncompressed bitstream or a value used to replace 8x `0x00` in compress mode
- [15:8] : `0xFF` for uncompressed bitstream or a value used to replace 4x `0x00` in compress mode
- [7:0] : `0xFF` for uncompressed bitstream or a value used to replace 2x `0x00` in compress mode
`0x0B` (or `0x8B` when `bit_crc_check` not set) only present when `bit_security` is set
Followed by 3x `0x00` bytes
`0xD2` Set SPI flash address (8 bytes)
- [56:32]: unknown, always `0x00FFFF`
- [31:0] : SPI flash address (or `0x00000000` if N/A)
`0x12` (or `0x92` when `bit_crc_check` not set) Unknown `0x000000`
Last command before `0x3B` (or `0xBB` when `bit_crc_check` not set) config frame load command, probably equiv to `LSC_INIT_ADDRESS`
- [23] : `1` when `bit_crc_check` set
- [22:16]: unknown, always `0x00`
- [15:0] : number of lines in configuration data section
`0x0A` set USERCODE (similar to `ISC_PROGRAM_USERCODE`)
Followed by 3x `0x00` bytes
Then the 4-byte USERCODE
`0x08` final command in bitstream, probably equivalent to ECP5 `ISC_PROGRAM_DONE`
Followed by 3x `0x00` bytes
`0xFF` NOP/padding
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/doc/commandstructure.md
|
commandstructure.md
|
# longval tables
The table entries are divided into two parts: key and fuses. The key is an ascending ordered list of non-repeating feature codes, padded with zeros to the right up to a length of 16 elements. Fuses is an ascending ordered list of non-repeating fuse numbers, extended to the right with -1 to a length of 12 elements.
The feature codes change from board to board and no common recoding table has been found yet. So all codes below are correct for GW1N-1, for other boards empirical recoding is used (see beginning of file tiled_fuzzer.py).
In some cases, the key uses an as yet unknown feature that seems to be responsible for configuring the I/O logic. Voltage levels and I/O attributes do not depend on it, so this code is ignored when searching for a entry.
*The GW1N-4 boards stand out --- everything is different for them!*
## IOB tables
Correspondence of table numbers to pin names:
| A | B | C | D | E | F | G | H | I | J |
|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| 23 | 24 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 |
Simple IO attributes and their detected features codes, if empty, no fuses are set.
`SLEW_RATE`
| Value | Code |
|:-----:|:----:|
| SLOW | |
| FAST | 42 |
example: `SLEW_RATE=FAST`:
[42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3377, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]
`PULL_MODE`
| Value | Code |
|:------:|:----:|
| UP | |
| NONE | 45 |
| KEEPER | 44 |
| DOWN | 43 |
example: `PULL_MODE=DOWN`:
[43, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3342, 3357, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]
`HYSTERESIS`
| Value | Code |
|:------:|:----------:|
| NONE | |
| HIGH | {57, 85} |
| H2L | {58, 85} |
| L2H | {59, 85} |
example: `HYSTERESIS=HIGH`:
[37, 57, 85, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3352, 3374, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]
See this *37*? This is the ignored part of the key.
Complex attributes take into account the current I/O standard. But the standard code is only in addition to the attribute code, and does not work as a separate single switch.
IO standard codes
| Value | Code |
|:---------:|:-----:|
| LVCMOS33 | 68 |
| LVCMOS25 | 67 |
| LVCMOS18 | 66 |
| LVCMOS15 | 65 |
| LVCMOS12 | 64 |
| SSTL25_I | 71 |
| SSTL25_II | 71 |
| SSTL33_I | |
| SSTL33_II | |
| SSTL18_I | 72 |
| SSTL18_II | 72 |
| SSTL15 | 74 |
| HSTL18_I | 72 |
| HSTL18_II | 72 |
| HSTL15_I | 74 |
| PCI33 | 69 |
`DRIVE`
| Value | Code |
|:-----:|:-----:|
| 4 | 48 |
| 8 | 50 |
| 12 | 51 |
| 16 | 52 |
| 24 | 54 |
The code for `DRIVE` is made up of the value from the above table plus {56} plus the standard code.
example: 'IO_TYPE=LVCMOS18, DRIVE=8':
[12, 50, 56, 66, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3356, 3372, 3375, 3379, 3394, 3397, -1, -1, -1, -1, -1, -1]
`OPEN_DRAIN`
Perhaps the most difficult attribute at the moment. It uses the same fuses as `DRIVE`, setting one of them and clearing the other two. The procedure for determining the fuses is empirical and is best seen in the tiled_fuzzer.py code.
| Value | Code |
|:-----:|:-----------:|
| ON | {55, 70} |
NOISE fuse: {55, 72}
example: 'OPEN_DRAIN=ON':
16mA LVCMOS33 fuse:
[12, 52, 56, 68, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3210, 3238, 3245, 3263, 3273, 3281, -1, -1, -1, -1, -1, -1]
ON fuse:
[10, 55, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3273, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]
NOISE fuse:
[7, 55, 72, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3238, 3263, 3273, 3281, -1, -1, -1, -1, -1, -1, -1, -1]
Thus we clear {3210, 3245} and set {3273}.
## Tables of corner tiles
Corner tiles enable I/O banks and set logical levels.
Table 37.
The key includes the bank number, usually unchanged, but there are strange numbers like 10 or 30, which still need to be investigated.
Simple modes are found simply by the standard code:
| Value | Code |
|:---------:|:-----:|
| LVCMOS33 | 68 |
| LVCMOS25 | 67 |
| LVCMOS18 | 66 |
| LVCMOS15 | 65 |
| LVCMOS12 | 64 |
example: 'IO_TYPE=LVCMOS15'
[2, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2797, 2813, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]
Complex modes are obtained by adding fuse 79:
| Mode | Fuses |
|:---------:|:-----------------------:|
| SSTL15 | fuses(65) + fuses(79) |
| HSTL18_I | fuses(66) + fuses(79) |
| SSTL25_I | fuses(67) + fuses(79) |
| SSTL33_I | fuses(68) + fuses(79) |
example: 'IO_TYPE=SSTL15'
Fuse 79:
[3, 79, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2229, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]
Fuse 65:
[3, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2181, 2197, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]
Thus we set {2181, 2197, 2229}
TODO: Describe the situation when all pins in the bank are working as input
|
Apycula
|
/Apycula-0.9.0a1.tar.gz/Apycula-0.9.0a1/doc/longval-tables.md
|
longval-tables.md
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.