Add files using upload-large-folder tool
This view is limited to 50 files because the commit contains too many changes.
- .gitattributes +20 -0
- pypi-packages-metadata-000000000028.json +2 -2
- pypi-packages-metadata-000000000082.json +2 -2
- pypi-packages-metadata-000000000120.json +2 -2
- pypi-packages-metadata-000000000124.json +2 -2
- pypi-packages-metadata-000000000135.json +2 -2
- pypi-packages-metadata-000000000155.json +0 -0
- pypi-packages-metadata-000000000205.json +2 -2
- pypi-packages-metadata-000000000221.json +2 -2
- pypi-packages-metadata-000000000237.json +2 -2
- pypi-packages-metadata-000000000242.json +2 -2
- pypi-packages-metadata-000000000266.json +2 -2
- pypi-packages-metadata-000000000291.json +2 -2
- pypi-packages-metadata-000000000297.json +0 -0
- pypi-packages-metadata-000000000351.json +3 -19
- pypi-packages-metadata-000000000354.json +2 -2
- pypi-packages-metadata-000000000378.json +2 -2
- pypi-packages-metadata-000000000393.json +2 -2
- pypi-packages-metadata-000000000414.json +0 -0
- pypi-packages-metadata-000000000423.json +0 -0
- pypi-packages-metadata-000000000427.json +2 -2
- pypi-packages-metadata-000000000434.json +0 -0
- pypi-packages-metadata-000000000444.json +3 -9
- pypi-packages-metadata-000000000463.json +2 -2
- pypi-packages-metadata-000000000471.json +0 -0
- pypi-packages-metadata-000000000501.json +0 -0
- pypi-packages-metadata-000000000604.json +0 -0
- pypi-packages-metadata-000000000611.json +0 -0
- pypi-packages-metadata-000000000677.json +0 -0
- pypi-packages-metadata-000000000742.json +0 -0
- pypi-packages-metadata-000000000770.json +0 -0
- pypi-packages-metadata-000000000836.json +0 -0
- pypi-packages-metadata-000000000872.json +0 -0
- pypi-packages-metadata-000000000969.json +0 -0
- pypi-packages-metadata-000000000993.json +0 -0
- pypi-packages-metadata-000000001028.json +0 -0
- pypi-packages-metadata-000000001029.json +0 -0
- pypi-packages-metadata-000000001055.json +0 -0
- pypi-packages-metadata-000000001072.json +0 -0
- pypi-packages-metadata-000000001081.json +0 -0
- pypi-packages-metadata-000000001083.json +0 -0
- pypi-packages-metadata-000000001148.json +0 -0
- pypi-packages-metadata-000000001181.json +0 -0
- pypi-packages-metadata-000000001248.json +0 -0
- pypi-packages-metadata-000000001263.json +0 -0
- pypi-packages-metadata-000000001324.json +0 -0
- pypi-packages-metadata-000000001343.json +0 -0
- pypi-packages-metadata-000000001362.json +0 -0
- pypi-packages-metadata-000000001363.json +0 -0
- pypi-packages-metadata-000000001392.json +0 -0
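Each `pypi-packages-metadata-*.json` shard stores newline-delimited JSON, one PyPI release record per line (the plain-text diff for `pypi-packages-metadata-000000000351.json` at the bottom of this page shows the layout). A minimal reading sketch, assuming the shard has been materialized with `git lfs pull` rather than left as an LFS pointer; the shard name is just one example from the list above:

```python
import json

def iter_records(path):
    """Yield one package-metadata dict per non-blank line of a shard."""
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line:  # shards may contain blank lines (see the 351 diff below)
                continue
            yield json.loads(line)

# Keys such as "name", "version", and "sha256_digest" appear in the
# records shown in the diff for pypi-packages-metadata-000000000351.json.
for record in iter_records("pypi-packages-metadata-000000000028.json"):
    print(record["name"], record["version"], record["sha256_digest"])
```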
.gitattributes
CHANGED
@@ -1504,3 +1504,23 @@ pypi-packages-metadata-000000000515.json filter=lfs diff=lfs merge=lfs -text
 pypi-packages-metadata-000000000364.json filter=lfs diff=lfs merge=lfs -text
 pypi-packages-metadata-000000001614.json filter=lfs diff=lfs merge=lfs -text
 pypi-packages-metadata-000000001269.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000001181.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000002277.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000001392.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000002061.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000002238.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000000351.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000001362.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000000155.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000000414.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000000434.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000000423.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000000444.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000001363.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000001495.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000000471.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000001263.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000001343.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000001324.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000000297.json filter=lfs diff=lfs merge=lfs -text
+pypi-packages-metadata-000000000501.json filter=lfs diff=lfs merge=lfs -text
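Every appended rule has the form `<path> filter=lfs diff=lfs merge=lfs -text`, routing the file through the Git LFS clean/smudge filters and marking it as non-text for diffing. A minimal sketch that lists the LFS-routed paths, assuming only the one-rule-per-line form appended above (full `.gitattributes` parsing with globs and macros is more involved):

```python
def lfs_tracked_patterns(path=".gitattributes"):
    """Return path patterns whose attributes include the LFS filter."""
    patterns = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            parts = line.split()
            # e.g. "foo.json filter=lfs diff=lfs merge=lfs -text"
            if len(parts) >= 2 and "filter=lfs" in parts[1:]:
                patterns.append(parts[0])
    return patterns

print(lfs_tracked_patterns())
```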
pypi-packages-metadata-000000000028.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:1270a8b810db7972fe4be13ee96471817c7095d1a547c35dc287847910abec06
+size 65820152
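The three-key pointer format above (`version`, `oid sha256:<hex>`, `size <bytes>`) is what Git stores in place of the real shard. A minimal sketch, with illustrative local paths, that parses a pointer and checks a downloaded shard against its recorded digest and size:

```python
import hashlib
import os

def read_pointer(path):
    """Parse the key/value lines of a Git LFS pointer file."""
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

def verify(data_path, pointer):
    """Check a materialized shard against its pointer's oid and size."""
    with open(data_path, "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    assert pointer["oid"] == f"sha256:{digest}", "oid mismatch"
    assert int(pointer["size"]) == os.path.getsize(data_path), "size mismatch"

# Hypothetical paths: the pointer as committed vs. the LFS-downloaded shard.
ptr = read_pointer("pointers/pypi-packages-metadata-000000000028.json")
verify("data/pypi-packages-metadata-000000000028.json", ptr)
```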
pypi-packages-metadata-000000000082.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:6084833a5a82d2733d28de09370b3e4eea09611f19ff2369ecc69e5d168e1c5f
+size 96048423
pypi-packages-metadata-000000000120.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:eae51c42cbecfc6ad234a9e77b69486d8247bb7d4138e952ad592b03f157400c
+size 84447626
pypi-packages-metadata-000000000124.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a43ebde3f468adafd9b2fe4c704f6bdf877ccb39048deb323a497831b1f6c678
+size 85636954
pypi-packages-metadata-000000000135.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:df0b6b24259b5de9e2e511c4902d77c1a4ad0204c8f5aea05904f9020b513cb1
+size 59863270
pypi-packages-metadata-000000000155.json
CHANGED
The diff for this file is too large to render.
pypi-packages-metadata-000000000205.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:37acf18ccf33c8f662bc35de9118542d9b2a274a95a6ba9e776154df0d7edaf1
+size 57406156
pypi-packages-metadata-000000000221.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:72264c97309e30454c368000fc1d0bb9a363d1689827aa82d0153da50983a878
+size 80722808
pypi-packages-metadata-000000000237.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:5a63f9b1e584a4e9b9f4875d9364e0975de0f977f2cac765cd11494fd6ba34eb
+size 85973802
pypi-packages-metadata-000000000242.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:accaadbbfac24ab03791098ffc3cfb6a601397deecedad549669ba5eeb839bac
+size 87427318
pypi-packages-metadata-000000000266.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:3a9f94d6ad2f40b0954792669c4690ba6634e3a3bb7be2c6c0d1600e2a9f7977
+size 76167726
pypi-packages-metadata-000000000291.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:e0196eb07e396270933b8f3838bd24d70e920067add1632f7345196786637020
+size 77325613
pypi-packages-metadata-000000000297.json
CHANGED
The diff for this file is too large to render.
pypi-packages-metadata-000000000351.json
CHANGED
@@ -1,19 +1,3 @@
|
|
1 |
-
|
2 |
-
|
3 |
-
|
4 |
-
{"name":"pysqlx-core","version":"0.1.51b1","summary":"A fast and async SQL database wrapper for Python, with support for MySQL, PostgreSQL, SQLite and MS SQL Server.","description":"# __pysqlx-core__\n\n[](https://github.com/carlos-rian/pysqlx-core/actions?query=event%3Apush+branch%3Amain+workflow%3Aci)\n[](https://pypi.python.org/pypi/pysqlx-core)\n[](https://github.com/carlos-rian/pysqlx-core)\n[](https://github.com/carlos-rian/pysqlx-core/blob/main/LICENSE)\n[](https://pepy.tech/project/pysqlx-core)\n\npysqlx-core is an extremely fast Python library for communicating with various SQL databases.\n\nThis package provides the core functionality for [__PySQLX-Engine__](https://carlos-rian.github.io/pysqlx-engine/).\n\nThe package is currently a work in progress and subject to significant change.\n\n[__pysqlx-core__](https://pypi.org/project/pysqlx-core/) will be a separate package, required by [__pysqlx-engine__](https://carlos-rian.github.io/pysqlx-engine/).\n\nThis package is written entirely in Rust and compiled as a Python library using PyO3 and PyO3-Asyncio.\n\nThis core is not so friendly, but maybe you want to use it, feel free to suggest improvements.\n\n### Supported databases\n\n* [__`SQLite`__](https://www.sqlite.org/index.html)\n* [__`PostgreSQL`__](https://www.postgresql.org/)\n* [__`MySQL`__](https://www.mysql.com/)\n* [__`Microsoft SQL Server`__](https://www.microsoft.com/sql-server)\n\n### Supported Python versions\n\n* [__`Python \u003e= 3.8`__](https://www.python.org/)\n\n### Supported operating systems\n\n* [__`Linux`__](https://pt.wikipedia.org/wiki/Linux)\n* [__`MacOS`__](https://pt.wikipedia.org/wiki/Macos)\n* [__`Windows`__](https://pt.wikipedia.org/wiki/Microsoft_Windows)\n\n\n### Example of installation:\n\n__PIP__\n\n```bash\n$ pip install pysqlx-core\n```\n\n__Poetry__\n\n```bash\n$ poetry add pysqlx-core\n```\n\n### Example of usage:\n\n```python\nimport pysqlx_core\nimport asyncio\n\nasync def main(sql):\n # Create a connection \n db = await pysqlx_core.new(uri=\"postgresql://postgres:postgrespw@localhost:49153\")\n \n # Create a table\n stmt = pysqlx_core.PySQLxStatement(\n provider=\"postgresql\", \n sql=\"\"\"\n CREATE TABLE IF NOT EXISTS test (\n id SERIAL PRIMARY KEY,\n name VARCHAR(255) NOT NULL\n );\n \"\"\")\n await db.execute(stmt=stmt)\n\n # Insert a row and return quantity rows affected\n insert = pysqlx_core.PySQLxStatement(\n provider=\"postgresql\", \n sql=\"INSERT INTO test (name) VALUES (:name);\",\n params={\"name\": \"Carlos\"}\n )\n await db.execute(stmt=insert)\n\n # can you see the sql and params pre builded\n print(\"SQL:\", insert.sql())\n # output: INSERT INTO test (name) VALUES ($1);\n print(\"PARAMS:\", insert.params())\n # output: ['Carlos']\n\n # Select all rows, return a class PySQLxResponse\n result = await db.query_typed(stmt=pysqlx_core.PySQLxStatement(\n provider=\"postgresql\", \n sql=\"SELECT * FROM test;\"\n )\n )\n # get first row\n row = result.get_first() # Dict[str, Any] \n # get all rows\n rows = result.get_all() # List[Dict[str, Any]]\n # return the db 'types' to Pydantic BaseModel\n types = result.get_types() # Dict[str, str] \n\n # Select all rows, return how List[Dict[str, Any]]\n rows = await db.query_all(pysqlx_core.PySQLxStatement(provider=\"postgresql\", sql=\"SELECT * FROM test;\"))\n\n # close? 
no need 👌-\u003e auto-close when finished programmer or go out of context..\n \nasyncio.run(main())\n```\n\n","description_content_type":"text/markdown; charset=UTF-8; variant=GFM","author_email":"Carlos Rian \[email protected]\u003e","keywords":"async, database, sql, faster, pysqlx","classifiers":["Development Status :: 3 - Alpha","Framework :: AnyIO","Framework :: AsyncIO","Framework :: FastAPI","Framework :: Flask","Framework :: IPython","Intended Audience :: Developers","Intended Audience :: Information Technology","Intended Audience :: Other Audience","Intended Audience :: System Administrators","License :: OSI Approved :: MIT License","Operating System :: MacOS","Operating System :: Microsoft :: Windows","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3","Programming Language :: Python :: 3 :: Only","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.13","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Programming Language :: Python :: Implementation :: CPython","Programming Language :: Python :: Implementation :: PyPy","Programming Language :: Rust","Topic :: Database","Topic :: Internet","Topic :: Software Development","Topic :: Software Development :: Libraries","Topic :: Software Development :: Libraries :: Application Frameworks","Topic :: Software Development :: Libraries :: Python Modules","Typing :: Typed"],"platform":[],"requires_python":"\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":[],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Homepage","Source"],"uploaded_via":"twine/5.1.1 CPython/3.10.14","upload_time":"2024-08-25 16:02:08.703895 UTC","filename":"pysqlx_core-0.1.51b1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl","size":"5090937","path":"f1/3c/c9e602bc9667b75b4848f822d14093af5f4dc0a24331116d03b3d2ea8463/pysqlx_core-0.1.51b1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl","python_version":"cp39","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"6f94396ef4505dc4c1d7995f303fb391","sha256_digest":"eec96386483c995b4c91776994db3e48a9af84f8865d47176c9592e0ed406493","blake2_256_digest":"f13cc9e602bc9667b75b4848f822d14093af5f4dc0a24331116d03b3d2ea8463","license_files":[]}
|
5 |
-
{"name":"rocketscraper","version":"0.0.1","summary":"Python SDK for the Rocket Scraper API.","description":"# Rocket Scraper API Python SDK\n\nPython SDK for the [Rocket Scraper API](https://rocketscraper.com). For more information, visit the [GitHub repository](https://github.com/rocketscraper/rocketscraper-sdk-python).\n\n## Requirements\n\n- [Python](https://www.python.org/) version 3.7 or above\n\n## Installation\n\n```bash\npip install rocketscraper\n```\n\n## Usage\n\nTo use the SDK, you need to create a new instance of the `RocketClient` class and pass your API key as an argument.\n\n### Setup\n\n```python\nfrom rocketscraper import RocketClient\n\nrocket_client = RocketClient(api_key='YOUR_API_KEY')\n```\n\n### Scrape\n\nThe `scrape` method allows you to scrape data from a website using a schema. The method returns the scraped data in the format specified in the schema.\n\n```python\ndata = rocket_client.scrape(\n url='https://ycombinator.com/companies/pagerduty',\n schema={\n 'company': 'string',\n 'image_url': 'string',\n 'founded_at': 'string',\n 'size': 'integer',\n 'location': 'string',\n 'short_description': 'string',\n 'long_description': 'string',\n 'is_active': 'boolean',\n 'founders': [\n {\n 'name': 'string',\n 'role': 'string',\n },\n ],\n }\n)\n\nprint(data)\n```\n\nFor more details, visit the [Python SDK GitHub repository](https://github.com/rocketscraper/rocketscraper-sdk-python).\n\n## Documentation\n\nFor more information on how to use the Rocket Scraper API, visit the [Rocket Scraper API documentation](https://docs.rocketscraper.com).\n\n## License\n\nThis project is licensed under the MIT License. See the [LICENSE](https://github.com/rocketscraper/rocketscraper-sdk-python/blob/main/LICENSE) file for more details.\n","description_content_type":"text/markdown","author_email":"Rocket Scraper API \[email protected]\u003e","license":"MIT","keywords":"rocketscraper, api, web-scraping, ai","classifiers":["License :: OSI Approved :: MIT License","Operating System :: OS Independent","Programming Language :: Python :: 3"],"platform":[],"requires_python":"\u003e=3.7","requires":[],"provides":[],"obsoletes":[],"requires_dist":[],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Bug Tracker","Homepage","Repository"],"uploaded_via":"twine/5.1.1 CPython/3.12.1","upload_time":"2024-10-19 04:05:31.890108 UTC","filename":"rocketscraper-0.0.1.tar.gz","size":"3258","path":"2f/c9/08c3a25f44eb74d8830b5be98d4f106478c815c4e05ca9790d8cc2266f23/rocketscraper-0.0.1.tar.gz","python_version":"source","packagetype":"sdist","has_signature":false,"md5_digest":"5db63dd880fdcc34404c520289b9c486","sha256_digest":"0ecb4ed59c80351d564a797b76046895968e0cef39d9ea2ca21cd70f7ac206b5","blake2_256_digest":"2fc908c3a25f44eb74d8830b5be98d4f106478c815c4e05ca9790d8cc2266f23","license_files":[]}
|
6 |
-
{"name":"hestia-earth-validation","version":"0.32.13","summary":"HESTIA Data Validation library","description":"# HESTIA Data Validation\n\n[](https://gitlab.com/hestia-earth/hestia-data-validation/commits/master)\n[](https://gitlab.com/hestia-earth/hestia-data-validation/commits/master)\n[](https://hestia-data-validation.readthedocs.io/en/latest/?badge=latest)\n\n## Install\n\n```bash\npip install hestia_earth_validation\n```\n\n## Usage\n\n```python\nfrom hestia_earth.validation import validate\n\n# for each node, this will return a list containing all the errors/warnings (empty list if no errors/warnings)\nerrors = validate(nodes)\n```\n\nNote 1: if you want to validate existing data (with `@type` and `@id` fields), please set the following environment variable:\n\n```\nVALIDATE_EXISTING_NODES=true\n```\n\nNote 2: to skip searching for terms using the HESTIA API, please enable mocking:\n\n```\nENABLE_MOCKING=true\n```\n\n\n","description_content_type":"text/markdown","author":"Guillaume Royer","author_email":"[email protected]","license":"MIT","keywords":"hestia, data, validation","classifiers":[],"platform":[],"home_page":"https://gitlab.com/hestia-earth/hestia-data-validation","requires_python":"\u003e=3","requires":[],"provides":[],"obsoletes":[],"requires_dist":["hestia-earth.schema==30.*","hestia-earth.utils\u003e=0.13.3","hestia-earth.distribution==0.0.15","requests","python-dateutil","hestia-earth.models\u003e=0.54.0; extra == \"models\"","hestia-earth.earth-engine\u003e=0.4.5; extra == \"spatial\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":[],"uploaded_via":"twine/6.0.1 CPython/3.9.21","upload_time":"2024-12-06 09:51:22.214171 UTC","filename":"hestia_earth_validation-0.32.13-py3-none-any.whl","size":"81854","path":"12/df/b2686786757c3e715c9c9356ea0e4ab6e079a80a7152f97934cc74d22967/hestia_earth_validation-0.32.13-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"bd6797159cdd6e058f64af0fe921a9be","sha256_digest":"fb2cc61835f76f17c7b843474dad2acf20dabbd4d3da5c5503fe0ed1848ced5f","blake2_256_digest":"12dfb2686786757c3e715c9c9356ea0e4ab6e079a80a7152f97934cc74d22967","license_files":[]}
|
7 |
-
{"name":"rosbags","version":"0.10.1","summary":"Pure Python library to read, modify, convert, and write rosbag files.","description":".. image:: https://gitlab.com/ternaris/rosbags/badges/master/pipeline.svg\n :target: https://gitlab.com/ternaris/rosbags/-/commits/master\n :alt: pipeline status\n\n.. image:: https://gitlab.com/ternaris/rosbags/badges/master/coverage.svg\n :target: https://gitlab.com/ternaris/rosbags/-/commits/master\n :alt: coverage report\n\n.. image:: https://img.shields.io/pypi/pyversions/rosbags\n :alt: python versions\n\n.. image:: https://img.shields.io/pypi/dm/rosbags\n :alt: PyPI - Downloads\n\n=======\nRosbags\n=======\n\nRosbags is the **pure python** library for everything rosbag. It contains:\n\n- **highlevel** easy-to-use interfaces,\n- **rosbag2** reader and writer,\n- **rosbag1** reader and writer,\n- **extensible** type system with serializers and deserializers,\n- **efficient converter** between rosbag1 and rosbag2,\n- and more.\n\nRosbags does not have any dependencies on the ROS software stacks and can be used on its own or alongside ROS1 or ROS2.\n\nRosbags was developed for `MARV \u003chttps://gitlab.com/ternaris/marv-robotics\u003e`_, which requires a fast, correct, and flexible library to read, manipulate, and write the various rosbag file formats.\n\n\nGetting started\n===============\n\nRosbags is published on PyPI and does not have any special dependencies. Simply install with pip::\n\n pip install rosbags\n\n\nRead and deserialize messages from rosbag1 or rosbag2 files:\n\n.. code-block:: python\n\n from pathlib import Path\n\n from rosbags.highlevel import AnyReader\n from rosbags.typesys import Stores, get_typestore\n\n bagpath = Path('/home/ros/rosbag_2020_03_24')\n\n # Create a type store to use if the bag has no message definitions.\n typestore = get_typestore(Stores.ROS2_FOXY)\n\n # Create reader instance and open for reading.\n with AnyReader([bagpath], default_typestore=typestore) as reader:\n connections = [x for x in reader.connections if x.topic == '/imu_raw/Imu']\n for connection, timestamp, rawdata in reader.messages(connections=connections):\n msg = reader.deserialize(rawdata, connection.msgtype)\n print(msg.header.frame_id)\n\n\nConvert between rosbag versions::\n\n # Convert \"foo.bag\", result will be \"foo/\"\n rosbags-convert foo.bag\n\n # Convert \"bar\", result will be \"bar.bag\"\n rosbags-convert bar\n\n # Convert \"foo.bag\", save the result as \"bar\"\n rosbags-convert foo.bag --dst /path/to/bar\n\n # Convert \"bar\", save the result as \"foo.bag\"\n rosbags-convert bar --dst /path/to/foo.bag\n\n\nDocumentation\n=============\n\nRead the `documentation \u003chttps://ternaris.gitlab.io/rosbags/\u003e`_ for further information.\n\n.. end documentation\n\n\nContributing\n============\n\nThank you for considering to contribute to rosbags.\n\nTo submit issues or create merge requests please follow the instructions provided in the `contribution guide \u003chttps://gitlab.com/ternaris/rosbags/-/blob/master/CONTRIBUTING.rst\u003e`_.\n\nBy contributing to rosbags you accept and agree to the terms and conditions laid out in there.\n\n\nDevelopment\n===========\n\nClone the repository and setup your local checkout::\n\n git clone https://gitlab.com/ternaris/rosbags.git\n\n cd rosbags\n python -m venv venv\n . venv/bin/activate\n\n pip install -r requirements-dev.txt\n pip install -e .\n\n\nThis creates a new virtual environment with the necessary python dependencies and installs rosbags in editable mode. 
The rosbags code base uses pytest as its test runner, run the test suite by simply invoking::\n\n pytest\n\n\nTo build the documentation from its source run sphinx-build::\n\n sphinx-build -a docs public\n\n\nThe entry point to the local documentation build should be available under ``public/index.html``.\n\n\nSupport\n=======\n\nProfessional support is available from `Ternaris \u003chttps://ternaris.com\u003e`_.\n","description_content_type":"text/x-rst","author_email":"Ternaris \[email protected]\u003e","license":"Apache-2.0","keywords":"cdr, conversion, deserialization, idl, mcap, message, msg, reader, ros, ros2, rosbag, rosbag2, serialization, writer","classifiers":["Development Status :: 4 - Beta","License :: OSI Approved :: Apache Software License","Programming Language :: Python","Programming Language :: Python :: 3 :: Only","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Topic :: Scientific/Engineering","Typing :: Typed"],"platform":[],"requires_python":"\u003e=3.10","requires":[],"provides":[],"obsoletes":[],"requires_dist":["lz4","numpy","ruamel.yaml","zstandard","declinate; extra == \"dev\"","mypy; extra == \"dev\"","pytest; extra == \"dev\"","pytest-cov; extra == \"dev\"","reuse; extra == \"dev\"","ruff; extra == \"dev\"","sphinx; extra == \"dev\"","sphinx-autodoc-typehints; extra == \"dev\"","sphinx-rtd-theme; extra == \"dev\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Changelog","Documentation","Homepage","Issues","Source"],"uploaded_via":"twine/5.0.0 CPython/3.12.3","upload_time":"2024-05-10 14:45:18.284621 UTC","filename":"rosbags-0.10.1-py3-none-any.whl","size":"109626","path":"46/57/b34d1e934c62f5a99b717ac8dd3566ae02b68df8a8c1976cb9bf5f8f0341/rosbags-0.10.1-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"b692c8b728596f2349e1401924c5e91a","sha256_digest":"fa814bfb40a89801a9d5a15be7620238002191c39cf786497e158556cfab9527","blake2_256_digest":"4657b34d1e934c62f5a99b717ac8dd3566ae02b68df8a8c1976cb9bf5f8f0341","license_files":[]}
|
8 |
-
{"name":"tach","version":"0.14.3","summary":"A Python tool to maintain a modular package architecture.","description":"# Tach\n\n[](https://pepy.tech/project/tach)\n[](https://pypi.Python.org/pypi/tach)\n[](https://pypi.Python.org/pypi/tach)\n[](https://pypi.Python.org/pypi/tach)\n[](https://github.com/gauge-sh/tach/actions/workflows/ci.yml)\n[](https://microsoft.github.io/pyright/)\n[](https://github.com/astral-sh/ruff)\n\na Python tool to enforce dependencies, written in Rust. Inspired by modular monolithic architecture.\n\n[Docs](https://docs.gauge.sh)\n\n[Discord](https://discord.gg/Kz2TnszerR)\n\n\u003cdiv align=\"center\"\u003e\n \u003cimg src=\"docs/assets/light_logo.png\" alt=\"gauge-logo\" width=\"30%\" style=\"border-radius: 50%; padding-bottom: 20px\"/\u003e\n\u003c/div\u003e\n\nTach lets you define and enforce dependencies between Python modules within your project.\n\nHere's an example:\n\n\n\nIf a module tries to import from another module that is not listed as a dependency, Tach can prevent it.\n\nTach is:\n\n- 🌎 Open source\n- 🐍 Installable via pip\n- 🔧 Able to be adopted incrementally\n- ⚡ Implemented with no runtime impact\n- ♾️ Interoperable with your existing systems (cli, hooks, ci, etc.)\n\n## Getting Started\n\n### Installation\n\n```bash\npip install tach\n```\n\n### Setup\n\nTach allows you to configure where you want to place module boundaries in your project.\n\nYou can do this interactively - run:\n\n```bash\n tach mod\n# Up/Down: Navigate Enter: Mark/unmark module Right: Expand Left: Collapse Ctrl + Up: Jump to parent\n# Ctrl + s: Exit and save Ctrl + c: Exit without saving Ctrl + a: Mark/unmark all\n```\n\nMark each module boundary with 'Enter'. You can mark all of your top-level Python source packages, or just a few which you want to isolate.\n\nIf your Python code lives below your project root, or if you are working in a monorepo with multiple Python packages, mark your Python [source roots](https://docs.gauge.sh/usage/configuration#source-roots) using the 's' key.\n\nThis will create the config file for your project, `tach.toml`.\n\nOnce you've marked all the modules you want to enforce dependencies between, run:\n\n```bash\ntach sync\n```\n\nDependencies that exist between each module you've marked will be written to `tach.toml`.\n\nCheck out what Tach has found!\n\n```\ncat tach.toml\n```\n\nNote: Your [source roots](https://docs.gauge.sh/usage/configuration#source-roots) will implicitly be treated as module boundaries, and can show up as `\u003croot\u003e`.\n\n### Enforcement\n\nTach comes with a cli command to enforce the boundaries that you just set up! From the root of your Python project, run:\n\n```bash\ntach check\n```\n\nYou will see:\n\n```bash\n✅ All module dependencies validated!\n```\n\nYou can validate that Tach is working by either:\n\n1. Commenting out an item in a `depends_on` key in `tach.toml`\n2. By adding an import between modules that didn't previously import from each other.\n\nGive both a try and run `tach check` again. This will generate an error:\n\n```bash\n❌ tach/check.py[L8]: Cannot import 'tach.filesystem'. Module 'tach' cannot depend on 'tach.filesystem'.\n```\n\nEach error indicates an import which violates your dependencies. If your terminal supports hyperlinks, click on the file path to go directly to the error.\n\nWhen an error is detected, `tach check` will exit with a non-zero code. 
It can be easily integrated with CI/CD, [Pre-commit hooks](https://docs.gauge.sh/usage/commands#tach-install), and [VS Code](https://marketplace.visualstudio.com/items?itemName=Gauge.tach), and more!\n\n### Extras\n\nVisualize your dependency graph.\n\n```bash\ntach show [--web]\n```\n\nTach will generate a graph of your dependencies. Here's what this looks like for Tach:\n\n\n\nNote that this graph is generated remotely with the contents of your `tach.toml` when running `tach show --web`.\n\nIf you would like to use the [GraphViz DOT format](https://graphviz.org/about/) locally, simply running `tach show` will generate `tach_module_graph.dot` in your working directory.\n\nYou can view the dependencies and usages for a given path:\n\n```bash\ntach report my_package/\n# OR\ntach report my_module.py\n```\n\ne.g.:\n\n```bash\n\u003e tach report python/tach/filesystem\n[Dependencies of 'python/tach/filesystem']\npython/tach/filesystem/install.py[L6]: Import 'tach.hooks.build_pre_commit_hook_content'\npython/tach/filesystem/project.py[L5]: Import 'tach.constants.CONFIG_FILE_NAME'\n...\n-------------------------------\n[Usages of 'python/tach/filesystem']\npython/tach/cache/access.py[L8]: Import 'tach.filesystem.find_project_config_root'\npython/tach/cache/setup.py[L7]: Import 'tach.filesystem.find_project_config_root'\n...\n```\n\nTach also supports:\n\n- [Strict public interfaces for modules](https://docs.gauge.sh/usage/strict-mode/)\n- [Deprecating individual dependencies](https://docs.gauge.sh/usage/deprecate)\n- [Incremental adoption](https://docs.gauge.sh/usage/unchecked-modules)\n- [Manual file configuration](https://docs.gauge.sh/usage/configuration)\n- [Monorepos and namespace packages](https://docs.gauge.sh/usage/configuration#source-roots)\n- [Inline exceptions](https://docs.gauge.sh/usage/tach-ignore)\n- [Pre-commit hooks](https://docs.gauge.sh/usage/commands#tach-install)\n\nMore info in the [docs](https://docs.gauge.sh/). 
Tach logs anonymized usage statistics which can be [opted out](https://docs.gauge.sh/usage/faq/) of.\nIf you have any feedback, we'd love to talk!\n\nIf you have any questions or run into any issues, let us know by either reaching out on [Discord](https://discord.gg/Kz2TnszerR) or submitting a [Github Issue](https://github.com/gauge-sh/tach/issues)!\n\n---\n\n### Contributors\n\n\u003ca href=\"https://github.com/gauge-sh/tach/graphs/contributors\"\u003e\n \u003cimg src=\"https://contrib.rocks/image?repo=gauge-sh/tach\" /\u003e\n\u003c/a\u003e\n\n","description_content_type":"text/markdown; charset=UTF-8; variant=GFM","author_email":"Caelean Barnes \[email protected]\u003e, Evan Doyle \[email protected]\u003e","keywords":"python, module, package, guard, enforcement, boundary, enforcer, domain, architecture","classifiers":["Development Status :: 4 - Beta","Environment :: Console","Intended Audience :: Developers","License :: OSI Approved :: MIT License","Operating System :: OS Independent","Programming Language :: Python","Programming Language :: Python :: 3","Programming Language :: Python :: 3 :: Only","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.13","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Software Development :: Libraries :: Python Modules","Topic :: Software Development :: Quality Assurance"],"platform":[],"requires_python":"\u003e=3.7","requires":[],"provides":[],"obsoletes":[],"requires_dist":["pyyaml~=6.0","tomli\u003e=1.2.2","tomli-w~=1.0","rich~=13.0","prompt-toolkit~=3.0","gitpython~=3.1","networkx\u003c4.0,\u003e=2.6","pydot\u003c4,\u003e=2","stdlib-list\u003e=0.10.0; python_version \u003c \"3.10\"","importlib-metadata\u003e=6.0; python_version == \"3.7\"","rich==13.8.1; extra == \"dev\"","prompt-toolkit==3.0.47; extra == \"dev\"","pip==24.0; extra == \"dev\"","pyright==1.1.387; extra == \"dev\"","ruff==0.7.2; extra == \"dev\"","setuptools==69.5.1; python_version \u003e \"3.7\" and extra == \"dev\"","twine==5.1.1; python_version \u003e \"3.7\" and extra == \"dev\"","build==1.2.1; python_version \u003e \"3.7\" and extra == \"dev\"","pytest==8.2.2; python_version \u003e \"3.7\" and extra == \"dev\"","pytest-mock==3.14.0; python_version \u003e \"3.7\" and extra == \"dev\"","coverage==7.6.0; python_version \u003e \"3.7\" and extra == \"dev\"","maturin==1.7.1; extra == \"dev\"","setuptools==47.1.0; python_version == \"3.7\" and extra == \"dev\"","twine==4.0.2; python_version == \"3.7\" and extra == \"dev\"","build==1.1.1; python_version == \"3.7\" and extra == \"dev\"","pytest==7.4.4; python_version == \"3.7\" and extra == \"dev\"","pytest-mock==3.11.1; python_version == \"3.7\" and extra == \"dev\"","coverage==7.2.7; python_version == \"3.7\" and extra == \"dev\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Homepage","Issues"],"uploaded_via":"maturin/1.7.1","upload_time":"2024-11-06 01:51:10.814136 
UTC","filename":"tach-0.14.3-cp311-none-win32.whl","size":"2203425","path":"a6/b9/17d680dc905c7c25ca6f96d4a865545706ca86fd927e2fbe6f11ad801905/tach-0.14.3-cp311-none-win32.whl","python_version":"cp311","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"ba4d717de28193f66c75842c622c8cf0","sha256_digest":"ff9e7e5da33a3c6d76a7237d6ce6c76cf7112d8e84cb9a9dbbc2cbd4eaa57abb","blake2_256_digest":"a6b917d680dc905c7c25ca6f96d4a865545706ca86fd927e2fbe6f11ad801905","license_files":[]}
|
9 |
-
{"name":"ipset-c","version":"0.1.0.dev1","summary":"IPSet written in C","description":"# ipset_c\n\nIPSet is written in C.\nRuns on Windows and Linux.\nTens of times faster than pure Python netaddr.IPSet.\nOnly for IPv4. Not picklable.\n\n\n```\npip install ipset_c\n```\n\n```\nfrom ipset_c import IPSet\na = IPSet(['12.12.12.0/25', '12.12.12.128/25'])\na.getCidrs() # ['12.12.12.0/24']\na.addCidr('8.8.8.8/30')\na.getCidrs() # ['8.8.8.8/30', '12.12.12.0/24']\nb = IPSet(['12.12.12.0/25'])\na.isSubset(b) # False\na.isSuperset(b) # True\na == b # False\na \u003c= b # False\na \u003e= b # True\na.isContainsCidr(\"12.12.0.0/16\") # False\na.isIntersectsCidr(\"12.12.0.0/16\") # True\nb.addCidr('4.4.4.4/32')\na.getCidrs() # ['8.8.8.8/30', '12.12.12.0/24']\nb.getCidrs() # ['4.4.4.4/32', '12.12.12.0/25']\nc = a \u0026 b\nc.getCidrs() # ['12.12.12.0/25']\nc = a | b\nc.getCidrs() # ['4.4.4.4/32', '8.8.8.8/30', '12.12.12.0/24']\nc = a - b\nc.getCidrs() # ['8.8.8.8/30', '12.12.12.128/25']\na.removeCidr('8.8.8.8/30')\na.getCidrs() # ['12.12.12.0/24']\nlen(a) # 256\nc = a.copy()\nbool(IPSet([])) # False\n```\n\n","description_content_type":"text/markdown","author":"glowlex","author_email":"[email protected]","license":"LGPL","keywords":"network, ipset","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Developers","License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)","License :: Other/Proprietary License","Operating System :: Microsoft :: Windows","Operating System :: POSIX :: Linux","Programming Language :: C","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Software Development","Topic :: Software Development :: Libraries","Topic :: Software Development :: Libraries :: Python Modules","Topic :: System :: Networking"],"platform":[],"requires_python":"\u003c4,\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":["setuptools\u003c71.0.0,\u003e=70.0.0"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Source"],"uploaded_via":"twine/5.1.1 CPython/3.9.13","upload_time":"2024-07-30 18:12:11.882642 UTC","filename":"ipset_c-0.1.0.dev1-cp39-cp39-macosx_14_0_arm64.whl","size":"17432","path":"b0/98/3980b082dd30ab64d00c71b34454a4ca948ece41ce8e095e02e3574345e4/ipset_c-0.1.0.dev1-cp39-cp39-macosx_14_0_arm64.whl","python_version":"cp39","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"c9878022419fcbcecf12126d9404deaf","sha256_digest":"4366ca62c38f7211883ca09ab554451957c6c9f10537ddc77fc4a8457193bd5f","blake2_256_digest":"b0983980b082dd30ab64d00c71b34454a4ca948ece41ce8e095e02e3574345e4","license_files":[]}
|
10 |
-
{"name":"netutils","version":"1.9.1","summary":"Common helper functions useful in network automation.","description":"# Netutils\n\n\u003cp align=\"center\"\u003e\n \u003cimg src=\"https://raw.githubusercontent.com/networktocode/netutils/develop/docs/images/icon-Netutils.png\" class=\"logo\" height=\"200px\"\u003e\n \u003cbr\u003e\n \u003ca href=\"https://github.com/networktocode/netutils/actions\"\u003e\u003cimg src=\"https://github.com/networktocode/netutils/actions/workflows/ci.yml/badge.svg?branch=main\"\u003e\u003c/a\u003e\n \u003ca href=\"https://netutils.readthedocs.io/en/latest\"\u003e\u003cimg src=\"https://readthedocs.org/projects/netutils/badge/\"\u003e\u003c/a\u003e\n \u003ca href=\"https://pypi.org/project/netutils/\"\u003e\u003cimg src=\"https://img.shields.io/pypi/v/netutils\"\u003e\u003c/a\u003e\n \u003ca href=\"https://pypi.org/project/netutils/\"\u003e\u003cimg src=\"https://img.shields.io/pypi/dm/netutils\"\u003e\u003c/a\u003e\n \u003cbr\u003e\n\u003c/p\u003e\n\n## Overview\n\nA Python library that is a collection of functions that are used in the common network automation tasks. Tasks such as converting a BGP ASN to and from dotted format, normalizing an interface name, or \"type 5\" encrypting a password. The intention is to centralize these functions while keeping the library light.\n\n## Documentation\n\nFull web-based HTML documentation for this library can be found over on the [Netutils Docs](https://netutils.readthedocs.io) website:\n\n- [User Guide](https://netutils.readthedocs.io/en/latest/user/lib_overview/) - Overview, Using the library, Getting Started.\n- [Administrator Guide](https://netutils.readthedocs.io/en/latest/admin/install/) - How to Install, Configure, Upgrade, or Uninstall the library.\n- [Developer Guide](https://netutils.readthedocs.io/en/latest/dev/contributing/) - Extending the library, Code Reference, Contribution Guide.\n- [Release Notes / Changelog](https://netutils.readthedocs.io/en/latest/admin/release_notes/).\n- [Frequently Asked Questions](https://netutils.readthedocs.io/en/latest/user/faq/).\n\n### Contributing to the Docs\n\nAll the Markdown source for the library documentation can be found under the [docs](https://github.com/networktocode/netutils/tree/develop/docs) folder in this repository. For simple edits, a Markdown capable editor is sufficient - clone the repository and edit away.\n\nIf you need to view the fully generated documentation site, you can build it with [mkdocs](https://www.mkdocs.org/). A container hosting the docs will be started using the invoke commands (details in the [Development Environment Guide](https://netutils.readthedocs.io/en/latest/dev/dev_environment/#docker-development-environment)) on [http://localhost:8001](http://localhost:8001). As your changes are saved, the live docs will be automatically reloaded.\n\nAny PRs with fixes or improvements are very welcome!\n\n## Questions\n\nFor any questions or comments, please check the [FAQ](https://netutils.readthedocs.io/en/latest/user/faq/) first. 
Feel free to also swing by the [Network to Code Slack](https://networktocode.slack.com/) (channel `#networktocode`), sign up [here](http://slack.networktocode.com/) if you don't have an account.\n\n","description_content_type":"text/markdown","author":"Network to Code, LLC","author_email":"[email protected]","license":"Apache-2.0","keywords":"netutils, network utils, network utilities, net-utils","classifiers":["Development Status :: 5 - Production/Stable","Intended Audience :: Developers","License :: OSI Approved :: Apache Software License","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9"],"platform":[],"home_page":"https://netutils.readthedocs.io","requires_python":"\u003c4.0,\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":["jsonschema\u003c5.0.0,\u003e=4.17.3; extra == \"optionals\"","napalm\u003c5.0.0,\u003e=4.0.0; extra == \"optionals\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Documentation","Repository"],"uploaded_via":"twine/5.1.0 CPython/3.12.4","upload_time":"2024-08-05 16:52:54.258991 UTC","filename":"netutils-1.9.1.tar.gz","size":"492197","path":"84/1c/7fc80b980ad541f73c9fbc976d53a41cd1ad4b19183ca99598bf828db2e2/netutils-1.9.1.tar.gz","python_version":"source","packagetype":"sdist","has_signature":false,"md5_digest":"db72ca3d505de495d1a732019fd5fea9","sha256_digest":"8ad8b5e02eb9d6692d0aaaf9c0f36da1a81f520f426a79d0e08e56cf7dbb3476","blake2_256_digest":"841c7fc80b980ad541f73c9fbc976d53a41cd1ad4b19183ca99598bf828db2e2","license_files":[]}
|
11 |
-
{"name":"ast-grep-py","version":"0.23.0","summary":"Structural Search and Rewrite code at large scale using precise AST pattern.","description":"# ast-grep python binding\n\n[](https://pypi.org/project/ast-grep-py/)\n[](https://ast-grep.github.io/)\n\n\u003cp align=center\u003e\n \u003cimg src=\"https://ast-grep.github.io/logo.svg\" alt=\"ast-grep\"/\u003e\n\u003c/p\u003e\n\n## ast-grep\n\n`ast-grep` is a tool for code structural search, lint, and rewriting. \n\nThis crate intends to build a native python binding of ast-grep and provide a python API for programmatic usage.\n\n## Installation\n\n```bash\npip install ast-grep-py\n```\n\n## Usage\n\nYou can take our tests as examples. For example, [test_simple.py](./tests/test_simple.py) shows how to use ast-grep to search for a pattern in a file.\n\nPlease see the [API usage guide](https://ast-grep.github.io/guide/api-usage.html) and [API reference](https://ast-grep.github.io/reference/api.html) for more details.\n\nOther resources include [ast-grep's official site](https://ast-grep.github.io/) and [repository](https://github.com/ast-grep/ast-grep).\n\n## Development\n\n### Setup virtualenv\n\n```shell\npython -m venv venv\n```\n\n### Activate venv\n\n```shell\nsource venv/bin/activate\n```\n\n### Install `maturin`\n\n```shell\npip install maturin[patchelf]\n```\n\n### Build bindings\n\n```shell\nmaturin develop\n```\n\n### Run tests\n\n```shell\npytest\n```\n\nAll tests files are under [tests](./tests) directory.\n\n## License\n\nThis project is licensed under the MIT license.\n\n","description_content_type":"text/markdown; charset=UTF-8; variant=GFM","author":"Herrington Darkholme \[email protected]\u003e","author_email":"Herrington Darkholme \[email protected]\u003e","maintainer_email":"Herrington Darkholme \[email protected]\u003e","license":"MIT","keywords":"ast, pattern, codemod, structural search, rewrite","classifiers":["Development Status :: 3 - Alpha","Environment :: Console","Intended Audience :: Developers","License :: OSI Approved :: MIT License","Operating System :: OS Independent","Programming Language :: Rust","Topic :: Security","Topic :: Software Development","Topic :: Software Development :: Quality Assurance","Topic :: Text Processing"],"platform":[],"requires_python":"\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":["pytest\u003e=7; extra == \"test\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Changelog","Documentation","Repository"],"uploaded_via":"maturin/1.6.0","upload_time":"2024-06-11 07:50:38.373195 UTC","filename":"ast_grep_py-0.23.0-cp38-cp38-macosx_11_0_arm64.whl","size":"5146700","path":"f9/23/2dc79869fa480bb7831734f1bd29109797eabda7360daacba0d5907d980b/ast_grep_py-0.23.0-cp38-cp38-macosx_11_0_arm64.whl","python_version":"cp38","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"3992321a7972ead5f757b9bd32f322b4","sha256_digest":"08220075ab550ee3f6b01a6c26ad6db11f11949b4ef2a9776910088110ae6973","blake2_256_digest":"f9232dc79869fa480bb7831734f1bd29109797eabda7360daacba0d5907d980b","license_files":[]}
|
12 |
-
{"name":"hussh","version":"0.1.3","summary":"SSH for Humans","description":"# Hussh: SSH for humans.\nHussh (pronounced \"hush\") is a client-side ssh library that offers low level performance through a high level interface.\n\nHussh uses [pyo3](https://docs.rs/pyo3/latest/pyo3/) to create Python bindings around the [ssh2](https://docs.rs/ssh2/latest/ssh2/) library for Rust.\n\n# Installation\n```\npip install hussh\n```\n\n# QuickStart\nHussh currently just offers a `Connection` class as your primary interface.\n```python\nfrom hussh import Connection\n\nconn = Connection(host=\"my.test.server\", username=\"user\", password=\"pass\")\nresult = conn.execute(\"ls\")\nprint(result.stdout)\n```\n\nThat's it! One import and class instantion is all you need to:\n- Execute commands\n- Perform SCP actions\n- Perform SFTP actions\n- Get an interactive shell\n\n# Authentication\nYou've already seen password-based authentication, but here it is again.\n```python\nconn = Connection(host=\"my.test.server\", username=\"user\", password=\"pass\")\n\n# or leave out username and connect as root\nconn = Connection(host=\"my.test.server\", password=\"pass\")\n```\n\nIf you prefer key-based authentication, Hussh can do that as well.\n```python\nconn = Connection(host=\"my.test.server\", private_key=\"~/.ssh/id_rsa\")\n\n# If your key is password protected, just use the password argument\nconn = Connection(host=\"my.test.server\", private_key=\"~/.ssh/id_rsa\", password=\"pass\")\n```\n\nHussh can also do agent-based authentication, if you've already established it.\n```python\nconn = Connection(\"my.test.server\")\n```\n\n# Executing commands\nThe most basic foundation of ssh libraries is the ability to execute commands against the remote host.\nFor Hussh, just use the `Connection` object's `execute` method.\n```python\nresult = conn.execute(\"whoami\")\nprint(result.stdout, result.stderr, result.status)\n```\nEach execute returns an `SSHResult` object with command's stdout, stderr, and status.\n\n# SFTP\nIf you need to transfer files to/from the remote host, SFTP may be your best bet.\n\n## Writing Files and Data\n```python\n# write a local file to the remote destination\nconn.sftp_write(local_path=\"/path/to/my/file\", remote_path=\"/dest/path/file\")\n\n# Write UTF-8 data to a remote file\nconn.sftp_write_data(data=\"Hello there!\", remote_path=\"/dest/path/file\")\n```\n\n## Reading Files\n```python\n# You can copy a remote file to a local destination\nconn.sftp_read(remote_path=\"/dest/path/file\", local_path=\"/path/to/my/file\")\n# Or copy the remote file contents to a string\ncontents = conn.sftp_read(remote_path=\"/dest/path/file\")\n```\n\n## Copy files from one connection to another\nHussh offers a shortcut that allows you to copy a file between two established connections.\n```python\nsource_conn = Connection(\"my.first.server\")\ndest_conn = Connection(\"my.second.server\", password=\"secret\")\n# Copy from source to destination\nsource_conn.remote_copy(source_path=\"/root/myfile.txt\", dest_conn=dest_conn)\n```\nBy default, if you don't pass in an alternate `dest_path`, Hussh will copy it to the same path as it came from on source.\n\n\n# SCP\nFor remote servers that support SCP, Hussh can do that to.\n\n## Writing Files and Data\n```python\n# write a local file to the remote destination\nconn.scp_write(local_path=\"/path/to/my/file\", remote_path=\"/dest/path/file\")\n\n# Write UTF-8 data to a remote file\nconn.scp_write_data(data=\"Hello there!\", remote_path=\"/dest/path/file\")\n```\n\n## 
Reading Files\n```python\n# You can copy a remote file to a local destination\nconn.scp_read(remote_path=\"/dest/path/file\", local_path=\"/path/to/my/file\")\n# Or copy the remote file contents to a string\ncontents = conn.scp_read(remote_path=\"/dest/path/file\")\n```\n\n# Tailing Files\nHussh offers a built-in method for tailing files on a `Connection` with the `tail` method.\n```python\nwith conn.tail(\"/path/to/file.txt\") as tf:\n # perform some actions or wait\n print(tf.read()) # at any time, you can read any unread contents\n # when you're done tailing, exit the context manager\nprint(tf.tailed_contents)\n```\n\n# Interactive Shell\nIf you need to keep a shell open to perform more complex interactions, you can get an `InteractiveShell` instance from the `Connection` class instance.\nTo use the interactive shell, it is recommended to use the `shell()` context manager from the `Connection` class.\nYou can send commands to the shell using the `send` method, then get the results from `exit_result` when you exit the context manager.\n\n```python\nwith conn.shell() as shell:\n shell.send(\"ls\")\n shell.send(\"pwd\")\n shell.send(\"whoami\")\n\nprint(shell.exit_result.stdout)\n```\n**Note:** The `read` method sends an EOF to the shell, so you won't be able to send more commands after calling `read`. If you want to send more commands, you would need to create a new `InteractiveShell` instance.\n\n# Disclaimer\nThis is a VERY early project that should not be used in production code!\nThere isn't even proper exception handling, so try/except won't work.\nWith that said, try it out and let me know your thoughts!\n\n# Future Features\n- Proper exception handling\n- Async Connection class\n- Low level bindings\n- Misc codebase improvements\n- TBD...\n\n","description_content_type":"text/markdown; charset=UTF-8; variant=GFM","author_email":"Jacob J Callahan \[email protected]\u003e","keywords":"ssh, ssh2, rust, pyo3","classifiers":["Natural Language :: English","Operating System :: MacOS :: MacOS X","Operating System :: POSIX","Operating System :: POSIX :: Linux","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Programming Language :: Python :: Implementation :: CPython","Programming Language :: Python :: Implementation :: PyPy","Programming Language :: Rust","Topic :: Software Development :: Libraries","Topic :: Software Development :: Libraries :: Python Modules","Topic :: System :: Networking","Topic :: System :: Shells"],"platform":[],"requires_python":"\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":["ruff; extra == \"dev\"","maturin; extra == \"dev\"","pytest; extra == \"dev\"","docker; extra == \"dev\"","pexpect; extra == \"dev\"","patchelf; sys_platform == \"linux\" and extra == \"dev\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":[],"uploaded_via":"maturin/1.5.1","upload_time":"2024-03-27 05:10:49.435307 
UTC","filename":"hussh-0.1.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl","size":"2339458","path":"79/eb/4a872971c5b6622b7390067477c97f2dfa190cc1f7577619ead474ee0144/hussh-0.1.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl","python_version":"cp39","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"58709dd56add5261fbb3b8e8c89c625e","sha256_digest":"94a48e34c86126092d79505676a7d2efd94ba071a245b3583d1f49bb5abf5b24","blake2_256_digest":"79eb4a872971c5b6622b7390067477c97f2dfa190cc1f7577619ead474ee0144","license_files":[]}
|
13 |
-
{"name":"pageai-sdk","version":"0.6.0.75","summary":"PageAI API","description":" # Introduction The PageAI (short for Synthetic EPUB) API is capapble of transforming multi page image only PDF files into accessible EPUBs. # noqa: E501\n \n","author":"OpenAPI Generator community","author_email":"[email protected]","keywords":"OpenAPI, OpenAPI-Generator, PageAI API","classifiers":[],"platform":[],"requires":[],"provides":[],"obsoletes":[],"requires_dist":["urllib3\u003e=1.15","six\u003e=1.10","certifi","python-dateutil"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":[],"uploaded_via":"twine/5.1.1 CPython/3.8.16","upload_time":"2024-08-28 21:07:28.756889 UTC","filename":"pageai_sdk-0.6.0.75-py3-none-any.whl","size":"20175","path":"73/29/7687eae534f5cdd1e222289ae6acaa102d97637e1974a89d9a57be3abf4b/pageai_sdk-0.6.0.75-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"5f4a6e0652ef57f1b674f9b2d36b3549","sha256_digest":"d124f3df3bfb57389f9c228d60a24acde8696a2e5707628f608c1eed7a6b6f2c","blake2_256_digest":"73297687eae534f5cdd1e222289ae6acaa102d97637e1974a89d9a57be3abf4b","license_files":[]}
|
14 |
-
{"name":"soxr","version":"0.5.0rc1","summary":"High quality, one-dimensional sample-rate conversion library","description":"# Python-SoXR\n\n[](https://github.com/dofuuz/python-soxr) [](https://pypi.org/project/soxr/) [](https://anaconda.org/conda-forge/soxr-python) [](https://repology.org/project/python:soxr/versions) [](https://python-soxr.readthedocs.io)\n\nHigh quality, one-dimensional sample-rate conversion library for Python.\n\n- Homepage: https://github.com/dofuuz/python-soxr\n- Documentation: https://python-soxr.readthedocs.io\n- PyPI: https://pypi.org/project/soxr/\n\nKeywords: Resampler, Audio resampling, Samplerate conversion, DSP(Digital Signal Processing)\n\nPython-SoXR is a Python wrapper of [libsoxr](https://sourceforge.net/projects/soxr/).\n\n\n## Installation\n\n```\npip install soxr\n```\n\nIf installation fails, upgrade pip with `python -m pip install --upgrade pip` and try again.\n\n\n### in Conda environment\n\n```\nconda install -c conda-forge soxr-python\n```\n\nNote: Conda packge name is `soxr-python`, not python-soxr.\n\n\n## Basic usage\n\n```python\nimport soxr\n\ny = soxr.resample(\n x, # input array – mono(1D) or multi-channel(2D of [frame, channel])\n 48000, # input samplerate\n 16000 # target samplerate\n)\n```\nIf input is not `numpy.ndarray`, it will be converted to `numpy.ndarray(dtype='float32')`. \ndtype should be one of float32, float64, int16, int32.\n\nOutput is `numpy.ndarray` with same dimension and data type of input.\n\n\n## Streaming usage\n\nUse `ResampleStream` for real-time processing or very long signal.\n\n```python\nimport soxr\n\nrs = soxr.ResampleStream(\n 44100, # input samplerate\n 16000, # target samplerate\n 1, # channel(s)\n dtype='float32' # data type (default = 'float32')\n)\n\neof = False\nwhile not eof:\n # Get chunk\n ...\n\n y_chunk = rs.resample_chunk(\n x, # input aray – mono(1D) or multi-channel(2D of [frame, channel])\n last=eof # Set True at end of input\n )\n```\n\nOutput frame count may not be consistent. This is normal operation. \n(ex. [0, 0, 0, 186, 186, 166, 186, 186, 168, ...])\n\n📝 [More code examples](https://dofuuz.github.io/dsp/2024/05/26/sample-rate-conversion-in-python.html)\n\n\n## Benchmark\n\nSweep, impulse, speed compairsion with other resamplers for Python.\n\nhttps://colab.research.google.com/drive/1_xYUs00VWYOAXShB85W1MFWaUjGHfO4K?usp=sharing\n\n\n### Speed comparison summary\n\nDownsampling 10 sec of 48000 Hz to 44100 Hz. \nRan on Google Colab.\n\nLibrary | Time on CPU (ms)\n------------------------ | ----------------\nsoxr (HQ) | 10.8\ntorchaudio | 13.8\nsoxr (VHQ) | 14.5\nscipy.signal.resample | 21.3\nlilfilter | 24.7\njulius | 31\nresampy (kaiser_fast) | 108\nsamplerate (sinc_medium) | 223\nresampy (kaiser_best) | 310\nsamplerate (sinc_best) | 794\n\n\n## Technical detail\n\nFor technical details behind resampler, see libsoxr docs.\n- https://sourceforge.net/p/soxr/wiki/Home/\n- http://sox.sourceforge.net/SoX/Resampling ([archive](https://web.archive.org/web/20230626144127/https://sox.sourceforge.net/SoX/Resampling))\n- https://sourceforge.net/p/soxr/code/ci/master/tree/src/soxr.h\n\nPython-SoXR uses [modified version](https://github.com/dofuuz/soxr) of libsoxr. [See changes here](https://github.com/dofuuz/soxr/compare/0.1.3...master). \nThese changes does not apply to dynamic linked builds. (e.g. 
conda-forge build) \nTo check the version of libsoxr, use `soxr.__libsoxr_version__`.\n\n\n## Credit and License\n\nPython-SoXR is LGPL v2.1+ licensed, following libsoxr's license.\n\n### OSS libraries used\n\n#### libsoxr (LGPLv2.1+)\nThe SoX Resampler library \nhttps://sourceforge.net/projects/soxr/\n\nPython-SoXR is a Python wrapper of libsoxr.\n\n#### PFFFT (BSD-like)\nPFFFT: a pretty fast FFT. \nhttps://bitbucket.org/jpommier/pffft/ \n\nlibsoxr dependency.\n","description_content_type":"text/markdown","author":"KEUM Myungchul","keywords":"audio resampling, samplerate conversion, SRC, signal processing, resampler","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Developers","Intended Audience :: Science/Research","Intended Audience :: Telecommunications Industry","License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)","Programming Language :: C","Programming Language :: C++","Programming Language :: Python :: 3","Topic :: Multimedia :: Sound/Audio :: Analysis","Topic :: Multimedia :: Sound/Audio :: Conversion","Topic :: Scientific/Engineering"],"platform":[],"requires_python":"\u003e=3.9","requires":[],"provides":[],"obsoletes":[],"requires_dist":["numpy","sphinx; extra == \"docs\"","sphinx-book-theme; extra == \"docs\"","myst-parser; extra == \"docs\"","linkify-it-py; extra == \"docs\"","pytest; extra == \"test\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Bug tracker","Documentation","Homepage","Source"],"uploaded_via":"twine/5.1.0 CPython/3.12.5","upload_time":"2024-08-19 13:26:56.093348 UTC","filename":"soxr-0.5.0rc1-cp310-cp310-win_amd64.whl","size":"165789","path":"23/26/69aba192bc9d8df031d298141824fed8fbd58f734ab7565567ddda22c63a/soxr-0.5.0rc1-cp310-cp310-win_amd64.whl","python_version":"cp310","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"71de0d99414ff5a710ba74397ec543fa","sha256_digest":"7daedf7ac5a7b1f6d1f6289b251d52bc07b54fd898878a102e84735253795673","blake2_256_digest":"232669aba192bc9d8df031d298141824fed8fbd58f734ab7565567ddda22c63a","license_files":[]}
{"name":"obsidian-apo","version":"0.8.3","summary":"Automated experiment design and black-box optimization","description":"\u003c!---\nobsidian\nReadMe\n--\u003e\n\n\u003cdiv align = \"center\"\u003e\n \u003cimg src=\"https://github.com/MSDLLCpapers/obsidian/blob/main/docs/_static/obsidian_logo.svg?raw=true\" class=\"only-light\" width=\"100\" alt = \"obsidian logo\"\u003e\n\u003c/div\u003e\n\n\n\u003cdiv align=\"center\"\u003e\n\n\u003ch1\u003e obsidian\u003c/h1\u003e\n\n\n[](https://github.com/MSDLLCpapers/obsidian/blob/main/LICENSE)\n[](https://github.com/MSDLLCpapers/obsidian/issues)\n[](https://pypi.org/project/obsidian-apo/)\n[](https://msdllcpapers.github.io/obsidian/)\n[](https://codecov.io/github/kstone40/obsidian)\n\n__obsidian__ is a library for algorithmic process design and black-box optimization using AI-guided experiment design\n\n\n\u003c/div\u003e\n\n\nThe _obsidian_ library offers a set of modules for designing, executing, analyzing, and visualizing algorithmic process optimization (APO) using sample-efficient strategies such as Bayesian Optimization (BO). _obsidian_ uses highly flexible models to build internal representations of the measured system in a way that can be explored for characterization and exploited for maximization based on uncertainty estimation and exploration strategies. _obsidian_ supports batch experimentation (joint optimization and parallel evaluation) and is highly configurable for varying use cases, although the default specifications are encouraged.\n\n_We thank you for your patience and invite you to collaborate with us while __obsidian__ is in beta!_\n\n # Key Features\n\n 1. __End-User-Friendly__: Designed to elevate the average process development scientist. No machine learning experience required.\n 2. __Deployable__ using pre-built _Dash_ application. Each class is fully serializable, without third-party packages, to enable web-based API usage. \n 3. __Explainable__ and visualizable using SHAP analysis and interactive figures.\n 5. __Flexible__: Handles any input (numeric, discrete) and optionally input/output constraints, multiple outcomes, batch optimization, and a variety of novelty objective compositions. We know that experiment campaigns can have fluctuating objectives and resources, and _obsidian_ is built to support that.\n 6. __Purpose-Driven Development__: Impactful features proposed, developed, maintained, and used by laboratory bench scientists. Revelantly designed for process development, optimization, and characterization.\n\n# How it Works: Algorithmic Optimization\nThe workflow for algorithmic process optimization is an iterative workflow of the following steps:\n\n1. Collect data\n2. Fit a model to the data and estimate uncertainty across a design space\n3. Search for new experiments and evaluate for objective and/or informational utility\n4. Design experiments where utility is maximized\n5. Repeat\n\nThe central object ob the __obsidian__ library is the `BayesianOptimizer`, which can be optionally wrapped by a `Campaign`. A bayesian optimization has two key components that govern the optimization:\n1. The surrogate model: A black-box model which is regressed to data and used for inference. Most often a _Gaussian Process_ (`surrogate='GP'`).\n2. The acquisition function: A mathematical description of the quality of potential experiments, as it pertains to optimization. 
Most often _Expected Improvement_ (`acquisition=['EI']`).\n\n# Usage Example\n\n## Specify Parameters \u0026 Initialize a Design\n\n```python\nfrom obsidian import Campaign, ParamSpace, Target\nfrom obsidian.parameters import Param_Categorical, Param_Ordinal, Param_Continuous\n\nparams = [\n Param_Continuous('Temperature', -10, 30),\n Param_Continuous('Concentration', 10, 150),\n Param_Continuous('Enzyme', 0.01, 0.30),\n Param_Categorical('Variant', ['MRK001', 'MRK002', 'MRK003']),\n Param_Ordinal('Stir Rate', ['Low', 'Medium', 'High']),\n ]\n\nX_space = ParamSpace(params)\ntarget = Target('Yield', aim='max')\ncampaign = Campaign(X_space, target)\nX0 = campaign.designer.initialize(10, 'LHS', seed=0)\n```\n\n| | Temperature | Concentration | Enzyme | Variant | Stir Rate |\n|---:|--------------:|----------------:|---------:|:----------|:------------|\n| 0 | 8 | 17 | 0.1405 | MRK001 | Medium |\n| 1 | 12 | 143 | 0.1695 | MRK003 | Medium |\n| 2 | 4 | 101 | 0.2855 | MRK002 | High |\n| 3 | 28 | 87 | 0.1115 | MRK002 | Low |\n| 4 | -4 | 115 | 0.2275 | MRK001 | Low |\n| 5 | -8 | 73 | 0.0825 | MRK002 | Medium |\n| 6 | 20 | 129 | 0.0535 | MRK001 | High |\n| 7 | 24 | 31 | 0.2565 | MRK002 | Medium |\n| 8 | 16 | 59 | 0.1985 | MRK003 | High |\n| 9 | 0 | 45 | 0.0245 | MRK003 | Low |\n\n\n## Collect Data and Fit the Optimizer\n\n```python\ncampaign.add_data(Z0)\ncampaign.fit()\n```\n\n## Suggest New Experiments\n\n```python\ncampaign.optimizer.suggest(m_batch=2)\n```\n\n| | Temperature | Concentration | Enzyme | Variant | Stir Rate | Yield (pred) | Yield lb | Yield ub | aq Method | aq Value |\n|---:|--------------:|----------------:|----------:|:----------|:------------|---------------:|-----------:|-----------:|:------------|-----------:|\n| 0 | -10 | 10 | 0.0918096 | MRK001 | Medium | 112.497 | 102.558 | 122.436 | EI | 0.848569 |\n| 1 | -10 | 150 | 0.0882423 | MRK002 | High | 89.8334 | 79.8589 | 99.8079 | EI | 0.870511 |\n\n# Installation\n\nThe latest _obsidian_ release can be installed using pip:\n\n```python\npip install obsidian-apo\n```\n\nTo install the required dependencies for running the _Dash_ app:\n```python\npip install obsidian-apo[app]\n```\n\nBe sure to `pip` install in a newly created `conda` environment to avoid dependency conflicts.\n\n# Contributing\n\nSee [CONTRIBUTING](https://github.com/MSDLLCpapers/obsidian/blob/main/CONTRIBUTING.md) to learn more.\n\n## Developers\n\n- Kevin Stone (Merck \u0026 Co., Inc.) [[email protected]](mailto:[email protected])\n- Yuting Xu (Merck \u0026 Co., Inc.) 
[[email protected]](mailto:[email protected])\n\n## Contributors\n\n- Ajit Vikram (Merck \u0026 Co., Inc.)\n- Melodie Christensen (Merck \u0026 Co., Inc.)\n- Kobi Felton (Merck \u0026 Co., Inc.)\n\n## License\n__obsidian__ is licensed by the [GPLv3 license](https://github.com/MSDLLCpapers/obsidian/blob/main/LICENSE).","description_content_type":"text/markdown","author":"Kevin Stone","author_email":"[email protected]","keywords":"optimization, experiment design, Bayesian optimization, process development, APO, DOE","classifiers":["Development Status :: 4 - Beta","License :: OSI Approved :: GNU General Public License v3 (GPLv3)","Operating System :: OS Independent","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12"],"platform":[],"home_page":"https://msdllcpapers.github.io/obsidian/","requires_python":"\u003c4.0,\u003e=3.10","requires":[],"provides":[],"obsoletes":[],"requires_dist":["torch==2.3.0","botorch\u003c0.12.0,\u003e=0.11.1","gpytorch\u003c2.0,\u003e=1.11","pandas\u003c3.0.0,\u003e=2.2.2","numpy\u003c2.0,\u003e=1.26","scipy\u003c2.0.0,\u003e=1.13.1","pyyaml\u003c7.0.0,\u003e=6.0.1","shap\u003c0.46.0,\u003e=0.45.1","matplotlib\u003c4.0.0,\u003e=3.9.0","plotly\u003c6.0.0,\u003e=5.22.0","flask\u003c4.0.0,\u003e=3.0.3; extra == \"app\"","dash\u003c3.0.0,\u003e=2.17.1; extra == \"app\"","dash-daq\u003c0.6.0,\u003e=0.5.0; extra == \"app\"","dash-bootstrap-components\u003c2.0.0,\u003e=1.6.0; extra == \"app\"","pytest\u003c9.0.0,\u003e=8.2.2; extra == \"dev\"","xlrd\u003c3.0.0,\u003e=2.0.1; extra == \"dev\"","ipykernel\u003c7.0.0,\u003e=6.29.4; extra == \"dev\"","jupyterlab\u003c5.0.0,\u003e=4.2.2; extra == \"dev\"","flake8\u003c8.0.0,\u003e=7.1.0; extra == \"dev\"","pytest-cov\u003c6.0.0,\u003e=5.0.0; extra == \"dev\"","sphinx\u003c8.0.0,\u003e=7.3.7; extra == \"docs\"","myst-parser\u003c4.0.0,\u003e=3.0.1; extra == \"docs\"","pydata-sphinx-theme\u003c0.16.0,\u003e=0.15.4; extra == \"docs\"","linkify-it-py\u003c3.0.0,\u003e=2.0.3; extra == \"docs\"","scikit-learn\u003c2.0.0,\u003e=1.5.1"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Changelog","Documentation","Repository"],"uploaded_via":"poetry/1.8.3 CPython/3.12.1 Linux/6.5.0-1025-azure","upload_time":"2024-08-22 02:33:13.111523 UTC","filename":"obsidian_apo-0.8.3-py3-none-any.whl","size":"136013","path":"92/77/c05de3f92e37f9e8ccd7855121ea2c85d0a6ad1a7558de7de3ada120ba74/obsidian_apo-0.8.3-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"ac8374b0e7217807f3ca66c8565f5f1c","sha256_digest":"5d74ab0999b447260bd0a6b6cfe0e9572fda5d15349241a92930454701ce786b","blake2_256_digest":"9277c05de3f92e37f9e8ccd7855121ea2c85d0a6ad1a7558de7de3ada120ba74","license_files":[]}
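In the obsidian-apo usage example above, `Z0` is passed to `campaign.add_data` but never constructed; it is presumably the initial design `X0` joined with the measured response. A minimal sketch under that assumption, with a fabricated simulated `Yield` column standing in for real lab measurements (the column name matches the `Target('Yield', aim='max')` defined in the README; the random values are purely illustrative):

```python
import numpy as np

# Hypothetical stand-in for running the 10 LHS experiments in the lab:
# attach a simulated 'Yield' response to the initial design X0.
rng = np.random.default_rng(0)
Z0 = X0.copy()
Z0['Yield'] = rng.uniform(60, 100, size=len(X0))  # fabricated responses, illustration only

campaign.add_data(Z0)
campaign.fit()
campaign.optimizer.suggest(m_batch=2)
```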
{"name":"synthegrator","version":"0.9.3.8","summary":"Framework for code synthesis and AI4SE research","description":"# Synthegrator\n\nSynthegrator is a framework for code generation problems. It simplifies\nthe process of loading common datasets and solving them with language models.\n\n# Installation\n```bash\npip install synthegrator\n```\n\nAlso, for execution you will need to [install docker](https://docs.docker.com/engine/install/).\n\n\n# Example\nLet's take a look at an example of how we can run a solver over\nthe HumanEval dataset, which collects 164 function synthesis problems.\n\n```python\n# Imports\nfrom lmwrapper.openai_wrapper import get_open_ai_lm, OpenAiModelNames\nfrom synthegrator.code_solver import LmCodeSolverAutoRegressive\nfrom synthegrator.execution_threading import solve_and_evaluate_problems\nfrom synthegrator.synthdatasets.human_eval import yield_human_eval\nfrom synthegrator.df_converters import solution_evals_to_df\n\n# Loading of a selection of AI4SE Datasets\nproblems = list(yield_human_eval())\n\n# Create a solver that can solve a problem\nlm = get_open_ai_lm(OpenAiModelNames.gpt_3_5_turbo_instruct)\n# ^ Make sure to add your API key to OPENAI_API_KEY or a file. \n# See https://github.com/DaiseyCode/lmwrapper for more.\nsolver = LmCodeSolverAutoRegressive(lm)\n\n# Generate code and execute problems testcases\nevals = list(solve_and_evaluate_problems(\n solver=solver,\n problems=problems,\n max_threads_eval=4,\n))\n# Convert to a dataframe\ndf = solution_evals_to_df(\n evals, \n pickle_gzip_whole_solution_eval=True\n)\nprint(\"Fraction Passing\", df.main_metric__is_success.mean())\n```\n\n# Architecture\n## Guiding Design Requirements\n- DR-1 **Support Diverse Datasets and Tasks.** We want an architecture that can\nsupport a diverse tasks (including potentially complex, repository-level tasks).\n- DR-2 **Consistent \u0026 Efficient Execution.** Experiments often involve running LLM-generated code. We want this to be fast, efficient, and reasonably secure.\n- DR-3 **Adaptable to State-of-the-Art Models.** This includes models like those from OpenAI or on HuggingFace. Additionally be adaptable to models\nthat might do complex retrieval or reasoning\n- DR-4 **Maintainable.** Try to follow best practices around automated testing and continuous integration.\n\n## Diagram\n\n\nTODO, add docs walking through each component\n\n# Datasets and Solvers\ndocs TODO\n","description_content_type":"text/markdown","author":"David Gros, Claudio Spiess","license":"Copyright 2024 David Gros, Claudio Spiess Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ","keywords":"code synthesis, llm","classifiers":["Programming Language :: Python :: 3"],"platform":[],"requires_python":"\u003e=3.10","requires":[],"provides":[],"obsoletes":[],"requires_dist":["lmwrapper[hf]\u003c0.10,\u003e=0.8.7.4","numpy\u003c2.0,\u003e=1.24.3","Pygments\u003c3.0,\u003e=2.15.1","tqdm\u003c5.0,\u003e=4.65.0","datasets\u003c3.0,\u003e=2.12.0","libcst\u003c2.0,\u003e=1.0.1","tree-sitter-languages\u003c2.0,\u003e=1.7.0","pytest\u003c8.0,\u003e=7.4.0","lxml\u003e=4.9.3","xxhash\u003c4.0,\u003e=3.3.0","typeguard\u003c5.0,\u003e=4.1","rank-bm25\u003c0.3,\u003e=0.2.2","docker\u003e=2","python-dateutil\u003e=2.4","requests\u003e=2.14.2","structlog\u003e=15.3","ruff\u003e=0.2.2; extra == \"dev\"","pytest~=7.4.2; extra == \"dev\"","pytest-cov~=4.1.0; extra == \"dev\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Homepage"],"uploaded_via":"twine/5.0.0 CPython/3.10.14","upload_time":"2024-04-24 21:06:29.422213 UTC","filename":"synthegrator-0.9.3.8-py3-none-any.whl","size":"3233724","path":"39/af/5619b552130de38dd6cb47e51f9d7a7f06d54283282861ed54baac5a3bc8/synthegrator-0.9.3.8-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"72d8b7ce23a734015e6c7fab314f20f0","sha256_digest":"e7d080e327c602e7f8217dd712e8f39d488e6f63ca92bbc52dafe4ca81359f12","blake2_256_digest":"39af5619b552130de38dd6cb47e51f9d7a7f06d54283282861ed54baac5a3bc8","license_files":[]}
{"name":"jacobi-motion","version":"0.0.36","summary":"Next-Generation Motion Planning.","description":"# Jacobi Motion Library\n\nThis is the **Jacobi Motion Library** for robot motion planning. It provides a clean API for efficient algorithms that compute time-optimized, jerk-limited robot arm trajectories in milliseconds.\n\nFor instructions regrading installation, getting started, and for general documentation we refer to [docs.jacobirobotics.com](https://docs.jacobirobotics.com).","description_content_type":"text/markdown","author_email":"Lars Berscheid \[email protected]\u003e","license":"Copyright (c) 2022-2024 Jacobi Robotics Inc. --- License Attributions This software depends on included third-party libraries that require reproduction of the license text in our distributions. - pybind11 Copyright (c) 2016 Wenzel Jakob \[email protected]\u003e, All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Please also refer to the file .github/CONTRIBUTING.md, which clarifies licensing of external contributions to this project including patches, pull requests, etc. - uWebSockets Copyright [Year ???] [Copyright Holder ???] Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. - hpp-fcl Software License Agreement (BSD License) Copyright (c) 2008-2014, Willow Garage, Inc. Copyright (c) 2014-2015, Open Source Robotics Foundation Copyright (c) 2014-2023, CNRS Copyright (c) 2018-2024, INRIA All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Open Source Robotics Foundation nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.","keywords":"robotics, motion-planning, trajectory-generation, obstacle-avoidance","classifiers":["Development Status :: 3 - Alpha","Intended Audience :: Science/Research","License :: Other/Proprietary License","Programming Language :: C++","Topic :: Scientific/Engineering"],"platform":[],"home_page":"https://www.jacobirobotics.com","requires_python":"\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":[],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Documentation","Homepage"],"uploaded_via":"twine/5.1.0 CPython/3.12.5","upload_time":"2024-08-19 15:55:45.970807 UTC","filename":"jacobi_motion-0.0.36-cp311-cp311-macosx_10_15_x86_64.whl","size":"7853757","path":"24/8e/139320f0038ddb9f6f651a8a845ff87f0a0c81f8b49fe84ca482647000e4/jacobi_motion-0.0.36-cp311-cp311-macosx_10_15_x86_64.whl","python_version":"cp311","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"387aad83bea1c3bc6b9adb5c660bedba","sha256_digest":"d13e9c7ca1ac8c68fe924d740d137037ce8d004f80b601addd2d31bc2c716989","blake2_256_digest":"248e139320f0038ddb9f6f651a8a845ff87f0a0c81f8b49fe84ca482647000e4","license_files":[]}
{"name":"openplantbook-sdk","version":"0.4.6","summary":"Open Plantbook SDK for Python","description":"# Open Plantbook SDK for Python\n\n[](https://openplantbook-sdk-py.readthedocs.io/en/latest/?badge=latest)\n[](https://badge.fury.io/py/openplantbook-sdk)\n\nThis is an SDK to integrate with [Open Plantbook](https://open.plantbook.io) API. \n\nMore information about Open Plantbook and documentation can be found [here](https://github.com/slaxor505/OpenPlantbook-client).\nIt requires registration and API credentials which can be generated on Open Plantbook website.\n\nSee [API documentation](https://documenter.getpostman.com/view/12627470/TVsxBRjD) for details about returned values by the SDK.\n[Discord](https://discord.gg/dguPktq9Zh) for support and questions \n\n## Installation\n\n```shell\npip install openplantbook-sdk\n```\n\nImport or require module\n\n```python\nfrom openplantbook_sdk import OpenPlantBookApi\n```\n\n## Usage\n\nSee [demo.py](demo.py)\n\n\n## License\nMIT\n","description_content_type":"text/markdown","author":"Slava Pisarevskiy","author_email":"[email protected]","license":"MIT","keywords":"json, timeseries, iot, jts","classifiers":["Intended Audience :: Developers","License :: OSI Approved :: MIT License","Operating System :: OS Independent","Programming Language :: Python","Programming Language :: Python :: 3","Programming Language :: Python :: 3.9"],"platform":[],"home_page":"https://github.com/slaxor505/openplantbook-sdk-py","requires_python":"\u003e=3.9","requires":[],"provides":[],"obsoletes":[],"requires_dist":["aiohttp","json-timeseries"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Documentation","Source"],"uploaded_via":"twine/4.0.2 CPython/3.10.12","upload_time":"2024-05-19 11:59:07.141149 UTC","filename":"openplantbook_sdk-0.4.6-py3-none-any.whl","size":"9125","path":"62/23/53dd30dc4edf2f929190d9f6a8b307cb403d61bf9c6729eed69afad4bc04/openplantbook_sdk-0.4.6-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"504f8b417ec16a51e88a5a5e29201b11","sha256_digest":"65c8f2e0c3617150e1ee4c5cf941f743ee02dd37fa05e546185469170a3ba094","blake2_256_digest":"622353dd30dc4edf2f929190d9f6a8b307cb403d61bf9c6729eed69afad4bc04","license_files":[]}
{"name":"silver-platter","version":"0.5.30","summary":"Large scale VCS change management","description":"Silver-Platter\n==============\n\n.. image:: logo.png\n :alt: Silver-Platter logo\n :align: center\n :width: 200px\n\nSilver-Platter makes it possible to contribute automatable changes to source\ncode in a version control system\n(`codemods \u003chttps://github.com/jelmer/awesome-codemods\u003e`_).\n\nIt automatically creates a local checkout of a remote repository,\nmakes user-specified changes, publishes those changes on the remote hosting\nsite and then creates a pull request.\n\nIn addition to that, it can also perform basic maintenance on branches\nthat have been proposed for merging - such as restarting them if they\nhave conflicts due to upstream changes.\n\nSilver-Platter powers the Debian Janitor (https://janitor.debian.org/) and\nKali Janitor (https://kali.janitor.org/). However, it is an independent project\nand can be used fine as a standalone tool. The UI is still a bit rough around\nthe edges, I'd be grateful for any feedback from people using it - please file bugs in\nthe issue tracker at https://github.com/jelmer/silver-platter/issues/new.\n\nGetting started\n~~~~~~~~~~~~~~~\n\nTo log in to a code-hosting site, use ``svp login``::\n\n svp login https://github.com/\n\nThe simplest way to create a change as a merge proposal is to run something like::\n\n svp run --mode=propose ./framwork.sh https://github.com/jelmer/dulwich\n\nwhere ``framwork.sh`` makes some modifications to a working copy and prints the\ncommit message and body for the pull request to standard out. For example::\n\n #!/bin/sh\n sed -i 's/framwork/framework/' README.rst\n echo \"Fix common typo: framwork ⇒ framework\"\n\nIf you leave pending changes, silver-platter will automatically create a commit\nand use the output from the script as the commit message. 
Scripts also\ncreate their own commits if they prefer - this is especially useful if they\nwould like to create multiple commits.\n\nRecipes\n~~~~~~~\n\nTo make this process a little bit easier to repeat, recipe files can be used.\nFor the example above, we could create a ``framwork.yaml`` with the following\ncontents::\n\n ---\n name: framwork\n command: |-\n sed -i 's/framwork/framework/' README.rst\n echo \"Fix common typo: framwork ⇒ framework\"\n mode: propose\n merge-request:\n commit-message: Fix a typo\n description:\n markdown: |-\n I spotted that we often mistype *framework* as *framwork*.\n\nTo execute this recipe, run::\n\n svp run --recipe=framwork.yaml https://github.com/jelmer/dulwich\n\nSee `example.yaml` for an example recipe with plenty of comments.\n\nIn addition, you can run a particular recipe over a set of repositories by\nspecifying a candidate list.\nFor example, if *candidates.yaml* looked like this::\n\n ---\n - url: https://github.com/dulwich/dulwich\n - url: https://github.com/jelmer/xandikos\n\nthen the following command would process each repository in turn::\n\n svp run --recipe=framwork.yaml --candidates=candidates.yaml\n\nBatch Mode\n~~~~~~~~~~\n\nUse batch mode when you're going to make a large number of changes and would\nlike to review or modify the diffs before sending them out::\n\n svp batch generate --recipe=framwork.yaml --candidates=candidate.syml framwork\n\nThis will then create a directory called \"framwork\", with a file called\n``batch.yaml`` with all the pending changes::\n\n name: framwork\n work:\n - url: https://github.com/dulwich/dulwich\n name: dulwich\n description: I spotted that we often mistype *framework* as *framwork*.\n commit-message: Fix a typo\n mode: propose\n - url: https://github.com/jelmer/xandikos\n name: dulwich\n description: I spotted that we often mistype *framework* as *framwork*.\n commit-message: Fix a typo\n mode: propose\n recipe: ../framwork.yaml\n\nFor each of the candidates, a clone with the changes is created. You can introspect\nand modify the clones as appropriate.\n\nAfter you review the changes, edit batch.yaml as you see fit - remove\nentries that don't appear to be correct, edit the details for the merge\nrequests, etc.\n\nOnce you're happy, you can publish the results::\n\n svp batch publish framwork\n\nThis will publish all the changes, using the mode and parameters specified in\n``batch.yaml``.\n\n``batch.yaml`` is automatically stripped of any entries in work that have fully\nlanded, i.e. where the pull request has been merged or where the changes were\npushed to the origin.\n\nTo check up on the status of your changes, run ``svp batch status``::\n\n svp batch status framwork\n\nAnd to refresh any merge proposals that may have become out of date,\nrun publish again::\n\n svp batch publish framwork\n\nSupported hosters\n~~~~~~~~~~~~~~~~~\n\nAt the moment, the following code hosters are supported:\n\n* `GitHub \u003chttps://github.com/\u003e`_\n* `Launchpad \u003chttps://launchpad.net/\u003e`_\n* `GitLab \u003chttps://gitlab.com/\u003e`_ instances, such as Debian's\n `Salsa \u003chttps://salsa.debian.org\u003e`_ or `GNOME's GitLab \u003chttps://gitlab.gnome.org/\u003e`_\n\nWorking with Debian packages\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nSeveral common operations for Debian packages have dedicated subcommands\nunder the ``debian-svp`` command. 
These will also automatically look up\npackaging repository location for any Debian package names that are\nspecified.\n\n* *upload-pending*: Build and upload a package and push/propose the\n changelog updates.\n* *run*: Similar to *svp run* but specific to Debian packages:\n it ensures that the *upstream* and *pristine-tar* branches are available as\n well, can optionally update the changelog, and can test that the branch still\n builds.\n\nSome Debian-specific example recipes are provided in `examples/debian/`:\n\n* *lintian-fixes.yaml*: Run the `lintian-brush\n \u003chttps://packages.debian.org/lintian-brush\u003e`_ command to\n fix common issues reported by `lintian\n \u003chttps://salsa.debian.org/qa/lintian\u003e`_.\n* *new-upstream-release.yaml*: Merge in a new upstream release.\n* *multi-arch-hints.yaml*: Apply multi-arch hints.\n* *orphan.yaml*: Mark a package as orphaned, update its Maintainer\n field and move it to the common Debian salsa group.\n* *rules-requires-root.yaml*: Mark a package as \"Rules-Requires-Root: no\"\n* *cme.yaml*: Run \"cme fix dpkg\", from the\n `cme package \u003chttps://packages.debian.org/cme\u003e`_.\n\n*debian-svp run* takes package name arguments that will be resolved\nto repository locations from the *Vcs-Git* field in the package.\n\nSee ``debian-svp COMMAND --help`` for more details.\n\nExamples running ``debian-svp``::\n\n # Create merge proposal running lintian-brush against Samba\n debian-svp run --recipe=examples/lintian-brush.yaml samba\n\n # Upload pending changes for tdb\n debian-svp upload-pending tdb\n\n # Upload pending changes for any packages maintained by Jelmer,\n # querying vcswatch.\n debian-svp upload-pending --vcswatch --maintainer [email protected]\n\n # Import the latest upstream release for tdb, without testing\n # the build afterwards.\n debian-svp run --recipe=examples/debian/new-upstream-release.yaml \\\n --no-build-verify tdb\n\n # Apply multi-arch hints to tdb\n debian-svp run --recipe=examples/debian/multiarch-hints.yaml tdb\n\nThe following environment variables are provided for Debian packages:\n\n* ``DEB_SOURCE``: the source package name\n* ``DEB_UPDATE_CHANGELOG``: indicates whether a changelog entry should\n be added. 
Either \"leave\" (leave alone) or \"update\" (update changelog).\n\nCredentials\n~~~~~~~~~~~\n\nThe ``svp hosters`` subcommand can be used to display the hosting sites that\nsilver-platter is aware of::\n\n svp hosters\n\nAnd to log into a new hosting site, simply run ``svp login BASE-URL``, e.g.::\n\n svp login https://launchpad.net/\n\nExit status\n~~~~~~~~~~~\n\n``svp run`` will exit 0 if no changes have been made, 1 if at least one\nrepository has been changed and 2 in case of trouble.\n\nPython API\n~~~~~~~~~~\n\nOther than the command-line API, silver-platter also has a Python API.\nThe core class is the ``Workspace`` context manager, which exists in two forms:\n\n * ``silver_platter.workspace.Workspace`` (for generic projects)\n * ``silver_platter.debian.Workspace`` (for Debian packages)\n\nAn example, adding a new entry to a changelog file in the ``dulwich`` Debian\npackage and creating a merge proposal with that change::\n\n from silver_platter.debian import Workspace\n import subprocess\n\n with Workspace.from_apt_package(package=\"dulwich\") as ws:\n subprocess.check_call(['dch', 'some change'], cwd=ws.path)\n ws.commit() # Behaves like debcommit\n ws.publish(mode='propose')\n","description_content_type":"text/x-rst","author_email":"Jelmer Vernooij \[email protected]\u003e","license":"GNU GPL v2 or later","keywords":"git bzr vcs github gitlab launchpad","classifiers":["Development Status :: 3 - Alpha","License :: OSI Approved :: GNU General Public License (GPL)","Operating System :: POSIX","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Programming Language :: Python :: Implementation :: CPython","Programming Language :: Python :: Implementation :: PyPy","Topic :: Software Development :: Version Control"],"platform":[],"requires_python":"\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":["breezy\u003e=3.3.3","dulwich\u003e=0.20.23","jinja2","pyyaml","ruamel.yaml","debmutate\u003e=0.3; extra == \"debian\"","python-debian\u003e=0.1.48; extra == \"debian\"","brz-debian; extra == \"debian\"","lintian-brush; extra == \"detect-gbp-dch\"","ruff==0.5.6; extra == \"dev\"","launchpadlib; extra == \"launchpad\"","testtools; extra == \"testing\"","debmutate\u003e=0.3; extra == \"testing\"","python-debian; extra == \"testing\"","brz-debian; extra == \"testing\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Bug Tracker","GitHub","Homepage","Repository"],"uploaded_via":"twine/5.1.0 CPython/3.12.5","upload_time":"2024-08-08 23:13:36.402656 UTC","filename":"silver_platter-0.5.30-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl","size":"4906053","path":"b6/33/af701f749b69262e25f87dcbf1094afd6d12b53154d1f0130e1abecd837b/silver_platter-0.5.30-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl","python_version":"cp313","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"208b3fd136fd0f839d5a30c71fdfef00","sha256_digest":"ba81679136cf3658e5da7c17333b9af3af4836ba76c29e2b170677c2de8875f8","blake2_256_digest":"b633af701f749b69262e25f87dcbf1094afd6d12b53154d1f0130e1abecd837b","license_files":[]}
+version https://git-lfs.github.com/spec/v1
+oid sha256:04cb16fee3be890d0b4a8405061ceb26287e14237164a4ddd33dfebc062f6ad9
+size 73821456
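These three added lines are the standard Git LFS pointer format (`version` / `oid` / `size`): the JSON records above were moved out of Git history, and the `oid` records the SHA-256 of the real blob, so a pulled file can be checked against it. A minimal sketch; the path is an assumption, since this hunk's filename header is not visible in the excerpt:

```python
import hashlib

# Assumed local path; the filename for this hunk is not shown in the excerpt.
path = "pypi-packages-metadata-000000000351.json"
expected = "04cb16fee3be890d0b4a8405061ceb26287e14237164a4ddd33dfebc062f6ad9"

h = hashlib.sha256()
with open(path, "rb") as f:
    # Stream in 1 MiB chunks: the pointer says the blob is about 74 MB.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
print(h.hexdigest() == expected)
```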
pypi-packages-metadata-000000000354.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:6295e196c2c0a6bae26a727e78731ea0cbcd805c96b863dc343dc767abb129d4
+size 63457501
pypi-packages-metadata-000000000378.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:0c329a7f5a2e41d3b34ceb8c186f13f01ad70dc2bc3c071f76e0d5d59f550d76
+size 74968411
pypi-packages-metadata-000000000393.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:ee104d2b4c43ac2cce802de606948a1a92d04e1b16bb3130b22854fd09056aee
+size 76594256
pypi-packages-metadata-000000000414.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000423.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000427.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:e3f4c86cb7abf0d6f2a94aca778ab69f5bd4d9c9c9646c49a0952058401545d8
+size 62071032
pypi-packages-metadata-000000000434.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000444.json
CHANGED
@@ -1,9 +1,3 @@
{"name":"vector-quantize-pytorch","version":"1.14.45","summary":"Vector Quantization - Pytorch","description":"\u003cimg src=\"./images/vq.png\" width=\"500px\"\u003e\u003c/img\u003e\n\n## Vector Quantization - Pytorch\n\nA vector quantization library originally transcribed from Deepmind's tensorflow implementation, made conveniently into a package. It uses exponential moving averages to update the dictionary.\n\nVQ has been successfully used by Deepmind and OpenAI for high quality generation of images (VQ-VAE-2) and music (Jukebox).\n\n## Install\n\n```bash\n$ pip install vector-quantize-pytorch\n```\n\n## Usage\n\n```python\nimport torch\nfrom vector_quantize_pytorch import VectorQuantize\n\nvq = VectorQuantize(\n dim = 256,\n codebook_size = 512, # codebook size\n decay = 0.8, # the exponential moving average decay, lower means the dictionary will change faster\n commitment_weight = 1. # the weight on the commitment loss\n)\n\nx = torch.randn(1, 1024, 256)\nquantized, indices, commit_loss = vq(x) # (1, 1024, 256), (1, 1024), (1)\n\n```\n\n## Residual VQ\n\nThis \u003ca href=\"https://arxiv.org/abs/2107.03312\"\u003epaper\u003c/a\u003e proposes to use multiple vector quantizers to recursively quantize the residuals of the waveform. You can use this with the `ResidualVQ` class and one extra initialization parameter.\n\n```python\nimport torch\nfrom vector_quantize_pytorch import ResidualVQ\n\nresidual_vq = ResidualVQ(\n dim = 256,\n num_quantizers = 8, # specify number of quantizers\n codebook_size = 1024, # codebook size\n)\n\nx = torch.randn(1, 1024, 256)\n\nquantized, indices, commit_loss = residual_vq(x)\nprint(quantized.shape, indices.shape, commit_loss.shape)\n# (1, 1024, 256), (1, 1024, 8), (1, 8)\n\n# if you need all the codes across the quantization layers, just pass return_all_codes = True\n\nquantized, indices, commit_loss, all_codes = residual_vq(x, return_all_codes = True)\n\n# (8, 1, 1024, 256)\n```\n\nFurthermore, \u003ca href=\"https://arxiv.org/abs/2203.01941\"\u003ethis paper\u003c/a\u003e uses Residual-VQ to construct the RQ-VAE, for generating high resolution images with more compressed codes.\n\nThey make two modifications. The first is to share the codebook across all quantizers. The second is to stochastically sample the codes rather than always taking the closest match. You can use both of these features with two extra keyword arguments.\n\n```python\nimport torch\nfrom vector_quantize_pytorch import ResidualVQ\n\nresidual_vq = ResidualVQ(\n dim = 256,\n num_quantizers = 8,\n codebook_size = 1024,\n stochastic_sample_codes = True,\n sample_codebook_temp = 0.1, # temperature for stochastically sampling codes, 0 would be equivalent to non-stochastic\n shared_codebook = True # whether to share the codebooks for all quantizers or not\n)\n\nx = torch.randn(1, 1024, 256)\nquantized, indices, commit_loss = residual_vq(x)\n\n# (1, 1024, 256), (1, 1024, 8), (1, 8)\n```\n\n\u003ca href=\"https://arxiv.org/abs/2305.02765\"\u003eA recent paper\u003c/a\u003e further proposes to do residual VQ on groups of the feature dimension, showing equivalent results to Encodec while using far fewer codebooks. 
You can use it by importing `GroupedResidualVQ`\n\n```python\nimport torch\nfrom vector_quantize_pytorch import GroupedResidualVQ\n\nresidual_vq = GroupedResidualVQ(\n dim = 256,\n num_quantizers = 8, # specify number of quantizers\n groups = 2,\n codebook_size = 1024, # codebook size\n)\n\nx = torch.randn(1, 1024, 256)\n\nquantized, indices, commit_loss = residual_vq(x)\n\n# (1, 1024, 256), (2, 1, 1024, 8), (2, 1, 8)\n```\n\n## Initialization\n\nThe SoundStream paper proposes that the codebook should be initialized by the kmeans centroids of the first batch. You can easily turn on this feature with one flag `kmeans_init = True`, for either `VectorQuantize` or `ResidualVQ` class\n\n```python\nimport torch\nfrom vector_quantize_pytorch import ResidualVQ\n\nresidual_vq = ResidualVQ(\n dim = 256,\n codebook_size = 256,\n num_quantizers = 4,\n kmeans_init = True, # set to True\n kmeans_iters = 10 # number of kmeans iterations to calculate the centroids for the codebook on init\n)\n\nx = torch.randn(1, 1024, 256)\nquantized, indices, commit_loss = residual_vq(x)\n\n# (1, 1024, 256), (1, 1024, 4), (1, 4)\n```\n\n## Increasing codebook usage\n\nThis repository will contain a few techniques from various papers to combat \"dead\" codebook entries, which is a common problem when using vector quantizers.\n\n### Lower codebook dimension\n\nThe \u003ca href=\"https://openreview.net/forum?id=pfNyExj7z2\"\u003eImproved VQGAN paper\u003c/a\u003e proposes to have the codebook kept in a lower dimension. The encoder values are projected down before being projected back to high dimensional after quantization. You can set this with the `codebook_dim` hyperparameter.\n\n```python\nimport torch\nfrom vector_quantize_pytorch import VectorQuantize\n\nvq = VectorQuantize(\n dim = 256,\n codebook_size = 256,\n codebook_dim = 16 # paper proposes setting this to 32 or as low as 8 to increase codebook usage\n)\n\nx = torch.randn(1, 1024, 256)\nquantized, indices, commit_loss = vq(x)\n\n# (1, 1024, 256), (1, 1024), (1,)\n```\n\n### Cosine similarity\n\nThe \u003ca href=\"https://openreview.net/forum?id=pfNyExj7z2\"\u003eImproved VQGAN paper\u003c/a\u003e also proposes to l2 normalize the codes and the encoded vectors, which boils down to using cosine similarity for the distance. They claim enforcing the vectors on a sphere leads to improvements in code usage and downstream reconstruction. You can turn this on by setting `use_cosine_sim = True`\n\n```python\nimport torch\nfrom vector_quantize_pytorch import VectorQuantize\n\nvq = VectorQuantize(\n dim = 256,\n codebook_size = 256,\n use_cosine_sim = True # set this to True\n)\n\nx = torch.randn(1, 1024, 256)\nquantized, indices, commit_loss = vq(x)\n\n# (1, 1024, 256), (1, 1024), (1,)\n```\n\n### Expiring stale codes\n\nFinally, the SoundStream paper has a scheme where they replace codes that have hits below a certain threshold with randomly selected vector from the current batch. You can set this threshold with `threshold_ema_dead_code` keyword.\n\n```python\nimport torch\nfrom vector_quantize_pytorch import VectorQuantize\n\nvq = VectorQuantize(\n dim = 256,\n codebook_size = 512,\n threshold_ema_dead_code = 2 # should actively replace any codes that have an exponential moving average cluster size less than 2\n)\n\nx = torch.randn(1, 1024, 256)\nquantized, indices, commit_loss = vq(x)\n\n# (1, 1024, 256), (1, 1024), (1,)\n```\n\n### Orthogonal regularization loss\n\nVQ-VAE / VQ-GAN is quickly gaining popularity. 
A \u003ca href=\"https://arxiv.org/abs/2112.00384\"\u003erecent paper\u003c/a\u003e proposes that when using vector quantization on images, enforcing the codebook to be orthogonal leads to translation equivariance of the discretized codes, leading to large improvements in downstream text to image generation tasks.\n\nYou can use this feature by simply setting the `orthogonal_reg_weight` to be greater than `0`, in which case the orthogonal regularization will be added to the auxiliary loss outputted by the module.\n\n```python\nimport torch\nfrom vector_quantize_pytorch import VectorQuantize\n\nvq = VectorQuantize(\n dim = 256,\n codebook_size = 256,\n accept_image_fmap = True, # set this true to be able to pass in an image feature map\n orthogonal_reg_weight = 10, # in paper, they recommended a value of 10\n orthogonal_reg_max_codes = 128, # this would randomly sample from the codebook for the orthogonal regularization loss, for limiting memory usage\n orthogonal_reg_active_codes_only = False # set this to True if you have a very large codebook, and would only like to enforce the loss on the activated codes per batch\n)\n\nimg_fmap = torch.randn(1, 256, 32, 32)\nquantized, indices, loss = vq(img_fmap) # (1, 256, 32, 32), (1, 32, 32), (1,)\n\n# loss now contains the orthogonal regularization loss with the weight as assigned\n```\n\n### Multi-headed VQ\n\nThere has been a number of papers that proposes variants of discrete latent representations with a multi-headed approach (multiple codes per feature). I have decided to offer one variant where the same codebook is used to vector quantize across the input dimension `head` times.\n\nYou can also use a more proven approach (memcodes) from \u003ca href=\"https://github.com/lucidrains/nwt-pytorch\"\u003eNWT paper\u003c/a\u003e\n\n```python\nimport torch\nfrom vector_quantize_pytorch import VectorQuantize\n\nvq = VectorQuantize(\n dim = 256,\n codebook_dim = 32, # a number of papers have shown smaller codebook dimension to be acceptable\n heads = 8, # number of heads to vector quantize, codebook shared across all heads\n separate_codebook_per_head = True, # whether to have a separate codebook per head. False would mean 1 shared codebook\n codebook_size = 8196,\n accept_image_fmap = True\n)\n\nimg_fmap = torch.randn(1, 256, 32, 32)\nquantized, indices, loss = vq(img_fmap)\n\n# (1, 256, 32, 32), (1, 32, 32, 8), (1,)\n\n```\n\n### Random Projection Quantizer\n\n\u003ca href=\"https://arxiv.org/abs/2202.01855\"\u003eThis paper\u003c/a\u003e first proposed to use a random projection quantizer for masked speech modeling, where signals are projected with a randomly initialized matrix and then matched with a random initialized codebook. One therefore does not need to learn the quantizer. This technique was used by Google's \u003ca href=\"https://ai.googleblog.com/2023/03/universal-speech-model-usm-state-of-art.html\"\u003eUniversal Speech Model\u003c/a\u003e to achieve SOTA for speech-to-text modeling.\n\nUSM further proposes to use multiple codebook, and the masked speech modeling with a multi-softmax objective. 
You can do this easily by setting `num_codebooks` to be greater than 1\n\n```python\nimport torch\nfrom vector_quantize_pytorch import RandomProjectionQuantizer\n\nquantizer = RandomProjectionQuantizer(\n dim = 512, # input dimensions\n num_codebooks = 16, # in USM, they used up to 16 for 5% gain\n codebook_dim = 256, # codebook dimension\n codebook_size = 1024 # codebook size\n)\n\nx = torch.randn(1, 1024, 512)\nindices = quantizer(x)\n\n# (1, 1024, 16)\n```\n\nThis repository should also automatically synchronizing the codebooks in a multi-process setting. If somehow it isn't, please open an issue. You can override whether to synchronize codebooks or not by setting `sync_codebook = True | False`\n\n### Finite Scalar Quantization\n\n\u003cimg src=\"./images/fsq.png\" width=\"500px\"\u003e\u003c/img\u003e\n\n| | VQ | FSQ |\n|------------------|----|-----|\n| Quantization | argmin_c \\|\\| z-c \\|\\| | round(f(z)) |\n| Gradients | Straight Through Estimation (STE) | STE |\n| Auxiliary Losses | Commitment, codebook, entropy loss, ... | N/A |\n| Tricks | EMA on codebook, codebook splitting, projections, ...| N/A |\n| Parameters | Codebook | N/A |\n\n[This](https://arxiv.org/abs/2309.15505) work out of Google Deepmind aims to vastly simplify the way vector quantization is done for generative modeling, removing the need for commitment losses, EMA updating of the codebook, as well as tackle the issues with codebook collapse or insufficient utilization. They simply round each scalar into discrete levels with straight through gradients; the codes become uniform points in a hypercube.\n\nThanks goes out to [@sekstini](https://github.com/sekstini) for porting over this implementation in record time!\n\n```python\nimport torch\nfrom vector_quantize_pytorch import FSQ\n\nquantizer = FSQ(\n levels = [8, 5, 5, 5]\n)\n\nx = torch.randn(1, 1024, 4) # 4 since there are 4 levels\nxhat, indices = quantizer(x)\n\n# (1, 1024, 4), (1, 1024)\n\nassert torch.all(xhat == quantizer.indices_to_codes(indices))\n```\n\nAn improvised Residual FSQ, for an attempt to improve audio encoding. \n\nCredit goes to [@sekstini](https://github.com/sekstini) for originally incepting the idea [here](https://github.com/lucidrains/vector-quantize-pytorch/pull/74#issuecomment-1742048597)\n\n```python\nimport torch\nfrom vector_quantize_pytorch import ResidualFSQ\n\nresidual_fsq = ResidualFSQ(\n dim = 256,\n levels = [8, 5, 5, 3],\n num_quantizers = 8\n)\n\nx = torch.randn(1, 1024, 256)\n\nresidual_fsq.eval()\n\nquantized, indices = residual_fsq(x)\n\n# (1, 1024, 256), (1, 1024, 8)\n\nquantized_out = residual_fsq.get_output_from_indices(indices)\n\n# (1, 1024, 256)\n\nassert torch.all(quantized == quantized_out)\n```\n\n### Lookup Free Quantization\n\n\u003cimg src=\"./images/lfq.png\" width=\"450px\"\u003e\u003c/img\u003e\n\nThe research team behind \u003ca href=\"https://arxiv.org/abs/2212.05199\"\u003eMagViT\u003c/a\u003e has released new SOTA results for generative video modeling. A core change between v1 and v2 include a new type of quantization, look-up free quantization (LFQ), which eliminates the codebook and embedding lookup entirely.\n\nThis paper presents a simple LFQ quantizer of using independent binary latents. Other implementations of LFQ exist. However, the team shows that MAGVIT-v2 with LFQ significantly improves on the ImageNet benchmark. 
The differences between LFQ and 2-level FSQ includes entropy regularizations as well as maintained commitment loss.\n\nDeveloping a more advanced method of LFQ quantization without codebook-lookup could revolutionize generative modeling.\n\nYou can use it simply as follows. Will be dogfooded at \u003ca href=\"https://github.com/lucidrains/magvit2-pytorch\"\u003eMagViT2 pytorch port\u003c/a\u003e\n\n```python\nimport torch\nfrom vector_quantize_pytorch import LFQ\n\n# you can specify either dim or codebook_size\n# if both specified, will be validated against each other\n\nquantizer = LFQ(\n codebook_size = 65536, # codebook size, must be a power of 2\n dim = 16, # this is the input feature dimension, defaults to log2(codebook_size) if not defined\n entropy_loss_weight = 0.1, # how much weight to place on entropy loss\n diversity_gamma = 1. # within entropy loss, how much weight to give to diversity of codes, taken from https://arxiv.org/abs/1911.05894\n)\n\nimage_feats = torch.randn(1, 16, 32, 32)\n\nquantized, indices, entropy_aux_loss = quantizer(image_feats, inv_temperature=100.) # you may want to experiment with temperature\n\n# (1, 16, 32, 32), (1, 32, 32), ()\n\nassert (quantized == quantizer.indices_to_codes(indices)).all()\n```\n\nYou can also pass in video features as `(batch, feat, time, height, width)` or sequences as `(batch, seq, feat)`\n\n```python\nimport torch\nfrom vector_quantize_pytorch import LFQ\n\nquantizer = LFQ(\n codebook_size = 65536,\n dim = 16,\n entropy_loss_weight = 0.1,\n diversity_gamma = 1.\n)\n\nseq = torch.randn(1, 32, 16)\nquantized, *_ = quantizer(seq)\n\nassert seq.shape == quantized.shape\n\nvideo_feats = torch.randn(1, 16, 10, 32, 32)\nquantized, *_ = quantizer(video_feats)\n\nassert video_feats.shape == quantized.shape\n```\n\nOr support multiple codebooks\n\n```python\nimport torch\nfrom vector_quantize_pytorch import LFQ\n\nquantizer = LFQ(\n codebook_size = 4096,\n dim = 16,\n num_codebooks = 4 # 4 codebooks, total codebook dimension is log2(4096) * 4\n)\n\nimage_feats = torch.randn(1, 16, 32, 32)\n\nquantized, indices, entropy_aux_loss = quantizer(image_feats)\n\n# (1, 16, 32, 32), (1, 32, 32, 4), ()\n\nassert image_feats.shape == quantized.shape\nassert (quantized == quantizer.indices_to_codes(indices)).all()\n```\n\nAn improvised Residual LFQ, to see if it can lead to an improvement for audio compression.\n\n```python\nimport torch\nfrom vector_quantize_pytorch import ResidualLFQ\n\nresidual_lfq = ResidualLFQ(\n dim = 256,\n codebook_size = 256,\n num_quantizers = 8\n)\n\nx = torch.randn(1, 1024, 256)\n\nresidual_lfq.eval()\n\nquantized, indices, commit_loss = residual_lfq(x)\n\n# (1, 1024, 256), (1, 1024, 8), (8)\n\nquantized_out = residual_lfq.get_output_from_indices(indices)\n\n# (1, 1024, 256)\n\nassert torch.all(quantized == quantized_out)\n```\n\n### Latent Quantization\n\nDisentanglement is essential for representation learning as it promotes interpretability, generalization, improved learning, and robustness. It aligns with the goal of capturing meaningful and independent features of the data, facilitating more effective use of learned representations across various applications. For better disentanglement, the challenge is to disentangle underlying variations in a dataset without explicit ground truth information. This work introduces a key inductive bias aimed at encoding and decoding within an organized latent space. 
The strategy incorporated encompasses discretizing the latent space by assigning discrete code vectors through the utilization of an individual learnable scalar codebook for each dimension. This methodology enables their models to surpass robust prior methods effectively.\n\nBe aware they had to use a very high weight decay for the results in this paper.\n\n```python\nimport torch\nfrom vector_quantize_pytorch import LatentQuantize\n\n# you can specify either dim or codebook_size\n# if both specified, will be validated against each other\n\nquantizer = LatentQuantize(\n levels = [5, 5, 8], # number of levels per codebook dimension\n dim = 16, # input dim\n commitment_loss_weight=0.1, \n quantization_loss_weight=0.1,\n)\n\nimage_feats = torch.randn(1, 16, 32, 32)\n\nquantized, indices, loss = quantizer(image_feats)\n\n# (1, 16, 32, 32), (1, 32, 32), ()\n\nassert image_feats.shape == quantized.shape\nassert (quantized == quantizer.indices_to_codes(indices)).all()\n```\n\nYou can also pass in video features as `(batch, feat, time, height, width)` or sequences as `(batch, seq, feat)`\n\n```python\n\nimport torch\nfrom vector_quantize_pytorch import LatentQuantize\n\nquantizer = LatentQuantize(\n levels = [5, 5, 8],\n dim = 16,\n commitment_loss_weight=0.1, \n quantization_loss_weight=0.1,\n)\n\nseq = torch.randn(1, 32, 16)\nquantized, *_ = quantizer(seq)\n\n# (1, 32, 16)\n\nvideo_feats = torch.randn(1, 16, 10, 32, 32)\nquantized, *_ = quantizer(video_feats)\n\n# (1, 16, 10, 32, 32)\n\n```\n\nOr support multiple codebooks\n\n```python\nimport torch\nfrom vector_quantize_pytorch import LatentQuantize\n\nmodel = LatentQuantize(\n levels = [4, 8, 16],\n dim = 9,\n num_codebooks = 3\n)\n\ninput_tensor = torch.randn(2, 3, dim)\noutput_tensor, indices, loss = model(input_tensor)\n\n# (2, 3, 9), (2, 3, 3), ()\n\nassert output_tensor.shape == input_tensor.shape\nassert indices.shape == (2, 3, num_codebooks)\nassert loss.item() \u003e= 0\n```\n\n## Citations\n\n```bibtex\n@misc{oord2018neural,\n title = {Neural Discrete Representation Learning},\n author = {Aaron van den Oord and Oriol Vinyals and Koray Kavukcuoglu},\n year = {2018},\n eprint = {1711.00937},\n archivePrefix = {arXiv},\n primaryClass = {cs.LG}\n}\n```\n\n```bibtex\n@misc{zeghidour2021soundstream,\n title = {SoundStream: An End-to-End Neural Audio Codec},\n author = {Neil Zeghidour and Alejandro Luebs and Ahmed Omran and Jan Skoglund and Marco Tagliasacchi},\n year = {2021},\n eprint = {2107.03312},\n archivePrefix = {arXiv},\n primaryClass = {cs.SD}\n}\n```\n\n```bibtex\n@inproceedings{anonymous2022vectorquantized,\n title = {Vector-quantized Image Modeling with Improved {VQGAN}},\n author = {Anonymous},\n booktitle = {Submitted to The Tenth International Conference on Learning Representations },\n year = {2022},\n url = {https://openreview.net/forum?id=pfNyExj7z2},\n note = {under review}\n}\n```\n\n```bibtex\n@inproceedings{lee2022autoregressive,\n title={Autoregressive Image Generation using Residual Quantization},\n author={Lee, Doyup and Kim, Chiheon and Kim, Saehoon and Cho, Minsu and Han, Wook-Shin},\n booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},\n pages={11523--11532},\n year={2022}\n}\n```\n\n```bibtex\n@article{Defossez2022HighFN,\n title = {High Fidelity Neural Audio Compression},\n author = {Alexandre D'efossez and Jade Copet and Gabriel Synnaeve and Yossi Adi},\n journal = {ArXiv},\n year = {2022},\n volume = 
{abs/2210.13438}\n}\n```\n\n```bibtex\n@inproceedings{Chiu2022SelfsupervisedLW,\n title = {Self-supervised Learning with Random-projection Quantizer for Speech Recognition},\n author = {Chung-Cheng Chiu and James Qin and Yu Zhang and Jiahui Yu and Yonghui Wu},\n booktitle = {International Conference on Machine Learning},\n year = {2022}\n}\n```\n\n```bibtex\n@inproceedings{Zhang2023GoogleUS,\n title = {Google USM: Scaling Automatic Speech Recognition Beyond 100 Languages},\n author = {Yu Zhang and Wei Han and James Qin and Yongqiang Wang and Ankur Bapna and Zhehuai Chen and Nanxin Chen and Bo Li and Vera Axelrod and Gary Wang and Zhong Meng and Ke Hu and Andrew Rosenberg and Rohit Prabhavalkar and Daniel S. Park and Parisa Haghani and Jason Riesa and Ginger Perng and Hagen Soltau and Trevor Strohman and Bhuvana Ramabhadran and Tara N. Sainath and Pedro J. Moreno and Chung-Cheng Chiu and Johan Schalkwyk and Franccoise Beaufays and Yonghui Wu},\n year = {2023}\n}\n```\n\n```bibtex\n@inproceedings{Shen2023NaturalSpeech2L,\n title = {NaturalSpeech 2: Latent Diffusion Models are Natural and Zero-Shot Speech and Singing Synthesizers},\n author = {Kai Shen and Zeqian Ju and Xu Tan and Yanqing Liu and Yichong Leng and Lei He and Tao Qin and Sheng Zhao and Jiang Bian},\n year = {2023}\n}\n```\n\n```bibtex\n@inproceedings{Yang2023HiFiCodecGV,\n title = {HiFi-Codec: Group-residual Vector quantization for High Fidelity Audio Codec},\n author = {Dongchao Yang and Songxiang Liu and Rongjie Huang and Jinchuan Tian and Chao Weng and Yuexian Zou},\n year = {2023}\n}\n```\n\n```bibtex\n@article{Liu2023BridgingDA,\n title = {Bridging Discrete and Backpropagation: Straight-Through and Beyond},\n author = {Liyuan Liu and Chengyu Dong and Xiaodong Liu and Bin Yu and Jianfeng Gao},\n journal = {ArXiv},\n year = {2023},\n volume = {abs/2304.08612}\n}\n```\n\n```bibtex\n@inproceedings{huh2023improvedvqste,\n title = {Straightening Out the Straight-Through Estimator: Overcoming Optimization Challenges in Vector Quantized Networks},\n author = {Huh, Minyoung and Cheung, Brian and Agrawal, Pulkit and Isola, Phillip},\n booktitle = {International Conference on Machine Learning},\n year = {2023},\n organization = {PMLR}\n}\n```\n\n```bibtex\n@inproceedings{rogozhnikov2022einops,\n title = {Einops: Clear and Reliable Tensor Manipulations with Einstein-like Notation},\n author = {Alex Rogozhnikov},\n booktitle = {International Conference on Learning Representations},\n year = {2022},\n url = {https://openreview.net/forum?id=oapKSVM2bcj}\n}\n```\n\n```bibtex\n@misc{shin2021translationequivariant,\n title = {Translation-equivariant Image Quantizer for Bi-directional Image-Text Generation},\n author = {Woncheol Shin and Gyubok Lee and Jiyoung Lee and Joonseok Lee and Edward Choi},\n year = {2021},\n eprint = {2112.00384},\n archivePrefix = {arXiv},\n primaryClass = {cs.CV}\n}\n```\n\n```bibtex\n@misc{mentzer2023finite,\n title = {Finite Scalar Quantization: VQ-VAE Made Simple},\n author = {Fabian Mentzer and David Minnen and Eirikur Agustsson and Michael Tschannen},\n year = {2023},\n eprint = {2309.15505},\n archivePrefix = {arXiv},\n primaryClass = {cs.CV}\n}\n```\n\n```bibtex\n@misc{yu2023language,\n title = {Language Model Beats Diffusion -- Tokenizer is Key to Visual Generation},\n author = {Lijun Yu and José Lezama and Nitesh B. Gundavarapu and Luca Versari and Kihyuk Sohn and David Minnen and Yong Cheng and Agrim Gupta and Xiuye Gu and Alexander G. 
Hauptmann and Boqing Gong and Ming-Hsuan Yang and Irfan Essa and David A. Ross and Lu Jiang},\n year = {2023},\n eprint = {2310.05737},\n archivePrefix = {arXiv},\n primaryClass = {cs.CV}\n}\n```\n\n```bibtex\n@inproceedings{Zhao2024ImageAV,\n title = {Image and Video Tokenization with Binary Spherical Quantization},\n author = {Yue Zhao and Yuanjun Xiong and Philipp Krahenbuhl},\n year = {2024},\n url = {https://api.semanticscholar.org/CorpusID:270380237}\n}\n```\n\n```bibtex\n@misc{hsu2023disentanglement,\n title = {Disentanglement via Latent Quantization}, \n author = {Kyle Hsu and Will Dorrell and James C. R. Whittington and Jiajun Wu and Chelsea Finn},\n year = {2023},\n eprint = {2305.18378},\n archivePrefix = {arXiv},\n primaryClass = {cs.LG}\n}\n```\n","description_content_type":"text/markdown","author_email":"Phil Wang \[email protected]\u003e","license":"MIT License Copyright (c) 2020 Phil Wang Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.","keywords":"artificial intelligence, deep learning, pytorch, quantization","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Developers","License :: OSI Approved :: MIT License","Programming Language :: Python :: 3.6","Topic :: Scientific/Engineering :: Artificial Intelligence"],"platform":[],"requires_python":"\u003e=3.9","requires":[],"provides":[],"obsoletes":[],"requires_dist":["einops\u003e=0.8.0","einx\u003e=0.2.2","torch\u003e=2.0","torchvision; extra == \"examples\"","tqdm; extra == \"examples\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Homepage","Repository"],"uploaded_via":"twine/5.1.1 CPython/3.9.19","upload_time":"2024-07-01 14:29:44.196116 UTC","filename":"vector_quantize_pytorch-1.14.45.tar.gz","size":"590587","path":"f3/83/2574ca9867763fe51e2c4bb573d415427a55884313450a91d3e1b822a12a/vector_quantize_pytorch-1.14.45.tar.gz","python_version":"source","packagetype":"sdist","has_signature":false,"md5_digest":"3a976b1515e686816ab624585808230f","sha256_digest":"b2857e30d6f11851f53d0387bfbcc500a7deeb5440046b55b6d7f6a97c51433d","blake2_256_digest":"f3832574ca9867763fe51e2c4bb573d415427a55884313450a91d3e1b822a12a","license_files":[]}
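The LatentQuantize record above describes quantizing each latent dimension with its own learnable scalar codebook. Below is a minimal sketch of that idea, not the library's implementation: the toy layer assumes the latent width equals `len(levels)` and omits the commitment and quantization losses.

```python
import torch
import torch.nn as nn

class ToyLatentQuantize(nn.Module):
    # One learnable scalar codebook per latent dimension; each value is
    # snapped to its nearest code, with a straight-through gradient.
    def __init__(self, levels):
        super().__init__()
        self.codebooks = nn.ParameterList(
            [nn.Parameter(torch.linspace(-1.0, 1.0, n)) for n in levels]
        )

    def forward(self, x):  # x: (batch, len(levels))
        quantized, indices = [], []
        for i, codes in enumerate(self.codebooks):
            idx = (x[:, i, None] - codes).abs().argmin(dim=-1)
            q = codes[idx]
            # straight-through estimator: forward uses q, backward sees identity
            quantized.append(x[:, i] + (q - x[:, i]).detach())
            indices.append(idx)
        return torch.stack(quantized, dim=-1), torch.stack(indices, dim=-1)

toy = ToyLatentQuantize(levels=[5, 5, 8])
x = torch.randn(4, 3)
q, idx = toy(x)
assert q.shape == x.shape and idx.shape == (4, 3)
```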
4 | -
{"name":"nodespecs","version":"0.0.17","summary":"The specs summarize utilities for computer instance","description":"# hardwareSummary\r\nExtracting and Fetching all system and hardware information such as os details, CPU and GPU information, disk and network usage in Python using platform, psutil and gputil libraries.\r\n\r\n\r\n\r\n### install and use with pip\r\n\r\n```shell\r\npip install nodespecs\r\npython -m specs\r\npython -c \"import specs; specs.bench_cpu()\"\r\npython -c \"import specs; specs.info_gpu()\"\r\n```\r\n\r\n```shell\r\n# for the machine not compatible with psutil\r\npython -m specs -l=1\r\n\r\npython -m specs -u=\"bcpu\"\r\n```\r\n\r\n```shell\r\n## server\r\npython -c \"import specs; specs.whoish()\"\r\npython -c \"import specs; specs.server()\"\r\n## client upload wt progress bar\r\npython -c \"import specs; specs.client('172.25.1.175', 12345,'./README.md',False)\"\r\n\r\n## client upload wo progress bar\r\npython -c \"import specs; specs.client('172.25.1.175', 12345,'./README.md',False)\"\r\n```\r\n\r\n```\r\nsudo apt update\r\nsudo apt install python3-pip\r\npython3 -m pip install nodespecs \u0026\u0026 python3 -m specs\r\n```\r\n\r\n\r\n\r\n```python\r\nimport specs\r\n\r\nspecs.info_gpu()\r\nspecs.bench_cpu()\r\n```\r\n\r\n\r\n\r\n\r\n\r\n#### Deprecated (install and use with git)\r\n\r\n```\r\n!git clone https://github.com/jinsanity07git/hardwareSummary \u0026\u0026 python hardwareSummary/hardware.py \u0026\u0026 python hardwareSummary/cpu-benchmark.py\r\n\r\n```\r\n\r\n```cmd\r\ngit clone https://github.com/jinsanity07git/hardwareSummary \u0026\u0026 python hardwareSummary/hardware.py \u0026\u0026 python hardwareSummary/cpu-benchmark.py\r\n\r\n```\r\n\r\n```bash\r\nsudo apt upgrade\r\nsudo apt install python3-pip\r\ngit clone https://github.com/jinsanity07git/hardwareSummary \u0026\u0026 python3 hardwareSummary/hardware.py \u0026\u0026 python3 hardwareSummary/cpu-benchmark.py\r\n```\r\n\r\n\r\n\r\n### CPU collection\r\n\r\n\r\n| Nickname | CPU | Arch | OS | Benchmarking | Comb | Score |\r\n| ------------------------------------- | ---------------------------------------------- | ------- | ------------- | ------------ | ------------------------------------------------------------ | ----- |\r\n| TC14 | 13th Gen Intel(R) Core(TM) i9-13900K | AMD64 | Windows 10 | 12.991 | Core-i9-13900K | 38.76 |\r\n| TC17\u003cbr /\u003eTC16 | Intel(R) Core(TM) i9-14900KF | AMD64 | Windows | 15.654 | Core-i9-14900KF | 39.25 |\r\n| iPhone 14 Pro Max\u003cbr /\u003eiPhone15,3[^3] | Apple A16 Bionic[^2] | arm64e | Darwin 23.4.0 | 15.962 | | |\r\n| TC19 | Intel(R) Core(TM) i7-9700K CPU @ 3.60GHz | AMD64 | Windows 10 | 20.534 | Core-i7-9700K | 9.45 |\r\n| Dell Precision 3561 | 11th Gen Intel(R) Core(TM) i7-11800H @ 2.30GHz | AMD64 | Windows | 23.852 | Core-i7-11800H | 13.47 |\r\n| TC07 | Intel(R) Core(TM) i7-9700K CPU @ 3.60GHz | AMD64 | Windows | 26.723 | Core-i7-9700K | 9.45 |\r\n| oracle cloudshell | ARM Cortex-A53 | aarch64 | Linux | 27.489 | | |\r\n| AWS `t2.micro` | Intel(R) Xeon(R) CPU E5-2676 v3 @ 2.40GHz | x86_64 | Linux | 27.785 | [Core-i7-9700K](https://technical.city/en/cpu/Core-i7-9700K) | 8.81 |\r\n| google cloudshell | Intel(R) Xeon(R) CPU @ 2.20GHz | x86_64 | Linux | 29.818 | | |\r\n| WUYING: 8 vCPU / 16 GiB Linux | Intel(R) Xeon(R) Platinum 8163 CPU @ 2.50GHz | x86_64 | Linux | 33.572 | [Xeon-Platinum-8163](https://versus.com/en/intel-xeon-gold-6126-vs-intel-xeon-platinum-8168) | |\r\n| TC03\u003cbr /\u003eTC11 | Intel(R) Xeon(R) Gold 6248R CPU @ 3.00GHz | AMD64 | 
Window | 34.612 | Xeon-Gold-6248R | 23.26 |\r\n| 2018 Macbook pro | Intel(R) Core(TM) i7-8559U CPU @ 2.70GHz | x86_64 | Darwin 22.1.0 | 37.105 | [Core-i7-8559U](https://technical.city/en/cpu/Core-i7-8559U) | 5.38 |\r\n| JVM | Intel(R) Xeon(R) Gold 6126 CPU @ 2.60GHz | x86_64 | Linux | 38.685 | [Xeon-Gold-6126](https://technical.city/en/cpu/Xeon-Gold-6126) | 12.21 |\r\n| TC01 | Intel(R) Xeon(R) CPU E5-2643 v4 @ 3.40GHz | AMD64 | Windows | 39.258 | Xeon-E5-2643-v4 | 7.62 |\r\n| Jquant | Intel(R) Xeon(R) Platinum 8163 CPU @ 2.50GHz | x86_64 | Linux | 40.128 | Xeon-Platinum-8163 | |\r\n| google colab free tier | Intel(R) Xeon(R) CPU @ 2.20GHz | x86_64 | Linux | 43.078 | | |\r\n| aws cloudshell | Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz | x86_64 | Linux | 49.396 | | |\r\n| JVM | Intel(R) Xeon(R) Gold 6126 CPU @ 2.60GHz | AMD64 | Windows | 62.969 | | |\r\n| serv00-FreeBSD | Intel(R) Xeon(R) Silver 4214R CPU @ 2.40GHz | AMD64 | Linux | 75.571 | | |\r\n| Oracle 1G-1G-0.5Gbps | AMD EPYC 7551 32-Core Processor | x86_64 | Linux | 98.732 | EPYC-7551 | 14.67 |\r\n| mini PC | Intel(R) Atom(TM) x5-Z8350 CPU @ 1.44GHz | x86_64 | Linux | 135.107 | Atom-x5-Z8350 | 0.57 |\r\n\r\n* Note\r\n * Kinds of Arch explanation[^1] \r\n\r\n### GPU collection\r\n\r\n| id | name | total memory | Synthetic benchmark | CUDA API |\r\n| ----- | ----------------------- | ------------ | ------------------- | -------- |\r\n| 17 | NVIDIA GeForce RTX 4060 | 8188.0MB | 50.69 | NA |\r\n| colab | Tesla T4 | 15360.0MB | 28.16 | 70627 |\r\n| dell | NVIDIA T600 Laptop GPU | 4096.0MB | 16.69 | 26600 |\r\n| 01 | Quadro M4000 | 8192.0MB | 17.27 | 16648 |\r\n\r\n\r\n\r\n\r\n## Feature in develop\r\n1. Streaming upload server in Python extended from [uploadserver](https://github.com/Densaugeo/uploadserver).\r\n2. work through [ws](https://websockets.readthedocs.io/en/stable/intro/index.html), transfer file using `websocket`\r\n3. [py-ios-device](https://github.com/YueChen-C/py-ios-device) python based Apple instruments protocol,you can get CPU, Memory and other metrics from real iOS devices\r\n\r\n\r\n\r\n\r\n\r\nPerformance source\r\n\r\n* https://browser.geekbench.com/\r\n * \r\n* https://technical.city/en/video/GeForce-RTX-4060-vs-Tesla-T4\r\n* https://technical.city/en/video/Tesla-T4-vs-T600\r\n\r\n[^1]: [mainstream CPU architecture](https://jinsanity07git.github.io/post/mainstream%20CPU%20architecture.html)\r\n[^2]: [Apple A16](https://en.wikipedia.org/wiki/Apple_A16) \r\n[^3]: [apple ios devices name](https://www.innerfence.com/howto/apple-ios-devices-dates-versions-instruction-sets)\r\n\r\n\r\n\r\n","description_content_type":"text/markdown","author_email":"jinsanity \[email protected]\u003e","license":"The MIT License (MIT) Copyright © 2024 \u003ccopyright holders\u003e Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.","keywords":"cpu, gpu, benchmark","classifiers":["License :: OSI Approved :: MIT License","Programming Language :: Python","Programming Language :: Python :: 3"],"platform":[],"requires_python":"\u003e=3.6","requires":[],"provides":[],"obsoletes":[],"requires_dist":["tabulate\u003e=0.8.0","GPUtil; extra == \"dev\"","pip-tools; extra == \"dev\"","psutil\u003e=5.9.5; extra == \"dev\"","toml; extra == \"dev\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Homepage"],"uploaded_via":"twine/5.0.0 CPython/3.11.0","upload_time":"2024-05-10 14:25:25.692005 UTC","filename":"nodespecs-0.0.17-py3-none-any.whl","size":"37353","path":"e9/42/3a2371b06e3e369ecb7cf1000a371e30995c81167ee73d562d5c1a37be3a/nodespecs-0.0.17-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"87cf68e320946f2ec9022c6525b42358","sha256_digest":"2872050708321e454229bc1c588aced55ce9d8dffb50d320354a627e92739432","blake2_256_digest":"e9423a2371b06e3e369ecb7cf1000a371e30995c81167ee73d562d5c1a37be3a","license_files":[]}
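In the nodespecs record above, the `Benchmarking` column is wall-clock seconds for a fixed CPU workload, so lower is better. A minimal sketch of that style of benchmark follows, assuming a simple arithmetic loop timed with `time.perf_counter`; the workload is illustrative, since the package's actual `bench_cpu` kernel is not shown in its README.

```python
import time

def toy_bench_cpu(iterations: int = 5_000_000) -> float:
    # Time a fixed CPU-bound integer loop; the score is elapsed seconds.
    start = time.perf_counter()
    total = 0
    for i in range(iterations):
        total += i * i
    elapsed = time.perf_counter() - start
    print(f"benchmark: {elapsed:.3f} s")
    return elapsed

if __name__ == "__main__":
    toy_bench_cpu()
```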
5 | -
{"name":"balsamic","version":"0.0.11","summary":"Send malicious pickles via requests or sockets","description":"\n# Balsamic \nbalsamic is a library for sending malicious pickles to a vunlerable application, via web requests, or a malicious server or client(currently ipv4 only). \nwe will add more payloads but for now we just execute shell commands. via the oscmd payload. \n\n## useage (standalone) \nweb request mode \n```\nusage: balsamic.py webreq [-h] -s {http,https} [-m METHOD] -rh RHOST -rp RPORT [-p PARAMETER] [-co COOKIE] -P PAYLOAD [-c COMMAND]\n\noptions:\n -h, --help show this help message and exit\n -s {http,https}, --schema {http,https}\n -m METHOD, --method METHOD\n -rh RHOST, --rhost RHOST\n -rp RPORT, --rport RPORT\n -p PARAMETER, --parameter PARAMETER\n -co COOKIE, --cookie COOKIE\n -P PAYLOAD, --payload PAYLOAD\n -c COMMAND, --command COMMAND\n```\nsocksend mode \n```\nusage: balsamic.py socksend [-h] -rh RHOST -rp RPORT -P PAYLOAD [-c COMMAND] [-s STEPS]\n\noptions:\n -h, --help show this help message and exit\n -rh RHOST, --rhost RHOST\n -rp RPORT, --rport RPORT\n -P PAYLOAD, --payload PAYLOAD\n -c COMMAND, --command COMMAND\n -s STEPS, --steps STEPS\n```\nsocklisten mode\n```\nusage: balsamic.py socklisten [-h] -lp LPORT -P PAYLOAD [-c COMMAND]\n\noptions:\n -h, --help show this help message and exit\n -lp LPORT, --lport LPORT\n -P PAYLOAD, --payload PAYLOAD\n -c COMMAND, --command COMMAND\n```\n\n## useage (library)\n```\nbalsamic.utility.command=\"command\"\nbalsamic.webreq(\"schema\",\"method\",\"rhost\",\"rport\",\"payload\",\"parameter\",\"cookie\")\nbalsamic.socksend(\"ip\",port,\"payload\",steps)\nbalsamic.socklisten(port,\"payload\",steps)\n```\n","description_content_type":"text/markdown","author":"Witchdoctor (malectrica)","keywords":"python, hack, pickle, serialization, security, sockets, web","classifiers":["Development Status :: 5 - Production/Stable","Intended Audience :: Developers","Operating System :: MacOS :: MacOS X","Operating System :: Microsoft :: Windows","Operating System :: Unix","Programming Language :: Python :: 3"],"platform":[],"requires":[],"provides":[],"obsoletes":[],"requires_dist":["pickle5","base64","requests","socket","argparse"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":[],"uploaded_via":"twine/5.1.0 CPython/3.11.4","upload_time":"2024-05-17 19:52:12.856806 UTC","filename":"balsamic-0.0.11.tar.gz","size":"2017","path":"ec/17/e1d768705c9ae55a5fffbaeb8f090b101708a9630941aac7f4dc95beed0c/balsamic-0.0.11.tar.gz","python_version":"source","packagetype":"sdist","has_signature":false,"md5_digest":"03f7e5fe42e1d89df3725f7e74bf15af","sha256_digest":"58f8e4032577513dd919601ae2ef919ac13bf754c82c2ef07c975936a4043abc","blake2_256_digest":"ec17e1d768705c9ae55a5fffbaeb8f090b101708a9630941aac7f4dc95beed0c","license_files":[]}
6 | -
{"name":"vsag","version":"0.1.2","summary":"VSAG Python Binding","description":"# VSAG Python Binding\n\n[](https://github.com/jiacai2050/vsag-py/actions/workflows/CI.yml)\n[](https://pypi.org/project/vsag)\n\n[VSAG](https://github.com/alipay/vsag) is a vector indexing library used for similarity search.\n\n## Installation\n\n```bash\npip install vsag\n```\n\n## Development\n\n```\npython -m venv .env\nsource .env/bin/activate\npip install maturin\npip install maturin[patchelf]\n```\n\nUseful maturin commands:\n```\n build Build the crate into python packages\n publish Build and publish the crate as python packages to pypi\n develop Install the crate as module in the current virtualenv\n```\n\n","description_content_type":"text/markdown; charset=UTF-8; variant=GFM","author_email":"Jiacai Liu \[email protected]\u003e","license":"Apache-2.0","keywords":"vector, search, llm, knn, hnsw, diskann","classifiers":["Intended Audience :: Developers","Operating System :: OS Independent","Programming Language :: Python :: Implementation :: CPython","Programming Language :: Python :: Implementation :: PyPy","Programming Language :: Rust","Topic :: Software Development"],"platform":[],"requires_python":"\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":[],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Documentation","Homepage","Repository"],"uploaded_via":"maturin/1.7.4","upload_time":"2024-11-11 10:34:28.167759 UTC","filename":"vsag-0.1.2-cp312-cp312-manylinux_2_28_x86_64.whl","size":"50000776","path":"f4/32/0de926de7917b91459849ec671c873680bb0aacd161f09c2dc08b76fee4d/vsag-0.1.2-cp312-cp312-manylinux_2_28_x86_64.whl","python_version":"cp312","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"b94347cfbbad63d3c2be5d1b50b6877d","sha256_digest":"768fff9dcdd952d3b65a67946b85e4bdb89cf794eaffe230433f196019e19fe0","blake2_256_digest":"f4320de926de7917b91459849ec671c873680bb0aacd161f09c2dc08b76fee4d","license_files":[]}
7 | -
{"name":"smaht-submitr","version":"0.8.2.1b51","summary":"Support for uploading file submissions to SMAHT.","description":"\n.. image:: https://staging.smaht.org/static/img/docs/submitr_logo.png\n :target: https://pypi.org/project/smaht-submitr/\n :alt: SMaHT remote Metadata Submission Tool: submitr\n :align: left\n\n\n|\n\n\n------------\n\n==============\nsmaht-submitr\n==============\n\n\nA file submission tool for SMaHT\n================================\n\n.. image:: https://github.com/smaht-dac/submitr/actions/workflows/main.yml/badge.svg\n :target: https://github.com/smaht-dac/submitr/actions\n :alt: Build Status\n\n.. image:: https://coveralls.io/repos/github/smaht-dac/submitr/badge.svg\n :target: https://coveralls.io/github/smaht-dac/submitr\n :alt: Coverage Percentage\n\n.. image:: https://readthedocs.org/projects/submitr/badge/?version=draft\n :target: https://submitr.readthedocs.io/en/draft/?badge=draft\n :alt: Documentation Status\n\n\nDescription\n===========\n\nThis is a tool for uploading certain kinds of files to SMaHT.\nThe \"R\" is for Remote file submission. You can think of this tool as putting the \"R\" in \"SMaHT\". :)\n\nPlease see our detailed documentation here: `SMaHT Submitr \u003chttps://submitr.readthedocs.io/en/draft/\u003e`_\n\n\nBackground\n==========\n\nThis tool was forked from SubmitCGAP and will probably remain compatible, but by forking it, the original repository will remain stable and this new repository can experiment safely.\n\nBecause SubmitCGAP supported submission of new cases, family histories, and gene lists, that's what this begins with. But that doesn't imply that those things are present in SMaHT. The protocol is designed to require both ends to agree on the availability of a particular kind of upload for it to happen.\n\n\nInstallation\n============\n\nInstalling this system involves these steps:\n\n1. Install Python and optionally a virtual environment manager of your choice (e.g. ``pyenv``)..\n2. Install this package with: ``pip install smaht-submitr``\n3. Setup your SMaHT Portal credentials file: ``~/.smaht-keys.json``. See `SMaHT Submitr Credentials \u003chttps://submitr.readthedocs.io/en/draft/installation.html\u003e`_ for more in this.\n\nSee detailed information about installation see: `Installing SMaHT Submitr \u003chttps://submitr.readthedocs.io/en/draft/installation.html\u003e`_.\n\n\nGetting Started\n===============\n\nOnce you have finished installing this library into your virtual environment,\nyou should have access to the ``submit-metadata-bundle``, ``resume-uploads``, and ``check-submissions``\ncommands. 
For more information about how to format files for submission and how to\nuse these commands, see `Getting Started with SMaHT Submitr \u003chttps://submitr.readthedocs.io/en/draft/usage.html\u003e`_.\n","description_content_type":"text/x-rst","author":"SMaHT DAC","author_email":"[email protected] ","license":"MIT","keywords":"submitr, smaht","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Developers","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Database :: Database Engines/Servers"],"platform":[],"home_page":"https://github.com/smaht-dac/submitr","requires_python":"\u003c3.13,\u003e=3.8.0","requires":[],"provides":[],"obsoletes":[],"requires_dist":["awscli\u003e=1.32.112","boto3\u003c2.0.0,\u003e=1.34.112","dcicutils==8.9.0.1b6","PyYAML\u003c7.0.0,\u003e=6.0.1","requests\u003c3.0.0,\u003e=2.31.0","googleapi\u003c0.2.0,\u003e=0.1.0"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Repository"],"uploaded_via":"poetry/1.4.2 CPython/3.9.19 Linux/6.5.0-1021-azure","upload_time":"2024-06-09 19:18:27.897082 UTC","filename":"smaht_submitr-0.8.2.1b51.tar.gz","size":"235422","path":"3f/e4/f6c2ff538226d3f3cf7d4b265818de60b2a6a1b05ee162b43fa447f8dc52/smaht_submitr-0.8.2.1b51.tar.gz","python_version":"source","packagetype":"sdist","has_signature":false,"md5_digest":"cca1c5c1318e3979b70c1396bf45a834","sha256_digest":"bc624bed437eafd9db46306487b62807bb91e45f56daee9d4bd097dee84fd69b","blake2_256_digest":"3fe4f6c2ff538226d3f3cf7d4b265818de60b2a6a1b05ee162b43fa447f8dc52","license_files":[]}
8 | -
{"name":"mkdocs-addresses","version":"0.3.1","summary":"Mkdocs automatic paths/addresses building - auto-completion support (VSC)","description":"\n \n\n\n## Links\n\n* [Project repository (GitLab)](https://gitlab.com/frederic-zinelli/mkdocs-addresses)\n* [Full online documentation](http://frederic-zinelli.gitlab.io/mkdocs-addresses/)\n* [The project on PyPI](https://pypi.org/project/mkdocs-addresses/)\n\n\n\n## Dependencies\n\n* Python 3.8+\n* mkdocs 1.4+\n* BeautifulSoup 4+\n\n\n\n## Overview\n\n### About\n\nThe [`mkdocs-addresses`](https://pypi.org/project/mkdocs-addresses/) is a plugin for `mkdocs` which offers:\n\n* Abstraction of the concrete tree hierarchy of pages and anchors within those when writing a link, utilizing unique identifiers:\n\n Benefit from a strong separation between logic and content, avoiding all addresses rewrite steps when some files are modified, split, merged or moved.\n \u003cbr\u003e\n\n* Verification of numerous links and addresses to ensure the absence of dead links or images within the documentation (including verifications beyond mkdocs 1.5+ capabilities):\n\n The tool warns you when something becomes wrong during development.\n \u003cbr\u003e\n\n* Convenient helpers to facilitate the usage of those identifiers within the docs pages. For users working with compatible IDEs, this translates to the availability of auto-completion features:\n\n Don't lose time searching for the exact name of the anchor in the file that is... where is it again? Let the autocompletion tool find them for you.\n \u003cbr\u003e\n\n\n\n### Identifiers: separating structure from content\n\nRelying on the `attr_list` markdown extension, use identifiers instead of actual paths to point to specific anchors in the documentation:\n\n```code\n## Very important title with anchor and id {: #point-here }\n```\n\n```code\nIn another file: navigate to [this very important title](--point-here).\n```\n\nThe plugin automatically rebuilds the appropriate addresses, considering various factors such as the source file location, the target, the `use_directory_urls` option, ...\n\n\n### Reduce dependencies on the files hierarchy\n\nIdentifiers still work after:\n- Changing header content\n- Moving sections from one file to another\n- Renaming files\n- Moving files\n\n\n\n\n### Provide [autocompletion helpers](http://frederic-zinelli.gitlab.io/mkdocs-addresses/autocompletion/) (_IDE dependent_)\n\n_(Currently only available for VSC-like IDEs)_\n\n* All snippets are automatically kept up to date while working on the documentation.\n* They provide various markdown snippets, to get a quick and easy access to all the references defined in the documentation, and use them within the markdown code they are usual used for.\n\n| Kind | Suggestion completion | Inserted markdown |\n|:-|:-|:-|\n| Doc identifier | `--point-here` | `--point-here` |\n| Doc links | `Link.point-here` | `[link to some place in the docs](--point-here)` |\n| Images in `assets/` (identifier) | `!!file_in_assets_jpg` | `!!file_in_assets_jpg` |\n| Images in `assets/` | `Img.file_in_assets_jpg` | `` |\n| Other files links | `++file_path_in_docs_html` | `++file_path_in_docs_html` |\n| Other files links | `File.file_path_in_docs_html` | `[link to a file](++file_path_in_docs_html)` |\n| External Links \u003csup\u003e\\*\u003c/sup\u003e | `Ext.global_ref` | `[global_ref][global_ref]` |\n| Code inclusions\u003csup\u003e\\*\\*\u003c/sup\u003e | `::md that_file_md` | `--\u003c8-- \"include/that_file.md\"` |\n\n\n\\*: requires an 
[external_links_file](http://frederic-zinelli.gitlab.io/mkdocs-addresses/configuration/#mkdocs_addresses.config_plugin.PluginOptions.external_links_file) for global references is configured.\n\n\\*\\*: requires the use of [inclusions](http://frederic-zinelli.gitlab.io/mkdocs-addresses/configuration/#mkdocs_addresses.config_plugin.PluginOptions.inclusions) directories.\n\n\n\n\n\n\n### Tracking dead links or addresses in the docs\n\nThe plugin also explores the documentation and warns you if it finds invalid addresses or identifiers. This works for:\n\n- Addresses in links\n- Addresses of images\n- Identifiers used by the plugin\n\n\n\n\n### User handed configuration\n\nA lot of [options](http://frederic-zinelli.gitlab.io/mkdocs-addresses/configuration/) are available for the user to fine tune the plugin's behavior.\n\n\n\n\n\n## Installation\n\nInstall the package on your machine (or in your project if you are using a virtual env):\n\n```\npip install mkdocs-addresses\n```\n\nRegister the plugin in the `mkdocs.yml` file:\n\n```yaml\nplugins:\n - search # To redeclare when plugins are added to mkdocs.yml\n - mkdocs-addresses\n```\n\nConfigure the plugin (see below).\n\n\n\n\n### Recommended `mkdocs.yml` configuration\n\nSee the [online documentation](http://frederic-zinelli.gitlab.io/mkdocs-addresses/#installation) for more details.\n\n#### Markdown extensions\n\n```yaml\nmarkdown_extensions:\n - attr_list # To define the identifiers in the markdown content\n - pymdownx.snippets: # If you need inclusions code snippets\n check_paths: true\n auto_append: [\"path_to_external_links_definition.md\"]\n # ^ see plugin's external_link_file configuration\n```\n\n#### Plugin configuration\n\n```yaml\nplugins:\n - search\n - mkdocs-addresses:\n - external_links_file: path_to_links_definition_if_any.md\n - inclusions:\n - location1_if_any\n - location2...\n```\n\nNote that the default configuration also implies the following choices:\n\n```yaml\n - dump_snippets_file: .vscode/links.code-snippets\n - fail_fast: false\n - ignore_auto_headers: true\n - ide: vsc\n```\nSo, if VSC isn't the utilized IDE, the [`ide`](http://frederic-zinelli.gitlab.io/mkdocs-addresses/configuration/#mkdocs_addresses.config_plugin.PluginOptions.ide) option should at the very least be modified.\n\n\n#### When using mkdocs 1.5+\n\nSignificant enhancements in address verification logic (which was notoriously lacking in earlier versions...) have been added in `mkdocs 1.5+`. 
But the plugin does more work, and the identifiers it is utilizing are generating warnings in the console.\n\nHence, deactivate the default verification logic for mkdocs 1.5+:\n\n```yaml\nvalidation:\n absolute_links: ignore\n unrecognized_links: ignore\n```\n\n\n## Links\n\n* [Project repository (GitLab)](https://gitlab.com/frederic-zinelli/mkdocs-addresses)\n* [Full online documentation](http://frederic-zinelli.gitlab.io/mkdocs-addresses/)\n* [The project on PyPI](https://pypi.org/project/mkdocs-addresses/)\n\n\n## License\n\n[Apache-2.0](https://www.tldrlegal.com/license/apache-license-2-0-apache-2-0)\nCopyright © 2023 Zinelli Frédéric","description_content_type":"text/markdown","author":"Frédéric Zinelli","author_email":"[email protected]","keywords":"mkdocs, mkdocs-plugin, links, autocompletion","classifiers":["Environment :: Plugins","Intended Audience :: Developers","License :: OSI Approved :: Apache Software License","Operating System :: Microsoft :: Windows","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Documentation","Topic :: Software Development","Topic :: Software Development :: Documentation","Topic :: Utilities"],"platform":[],"home_page":"http://frederic-zinelli.gitlab.io/mkdocs-addresses/","requires_python":"\u003c4.0,\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":["mkdocs\u003c2.0,\u003e=1.4","beautifulsoup4\u003c5.0,\u003e=4.12"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Repository"],"uploaded_via":"poetry/1.4.2 CPython/3.8.15 Linux/6.5.0-26-generic","upload_time":"2024-04-08 15:18:01.045171 UTC","filename":"mkdocs_addresses-0.3.1-py3-none-any.whl","size":"115688","path":"d7/15/4340199a68b36ce8a46ba0e882b0741eb2b2c837e29bd88985d508b95e16/mkdocs_addresses-0.3.1-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"37bd946592999d9c73cebd2656b337c6","sha256_digest":"46c5e3f515b7d68c60c0039efdcf4af0814f04327826cdf420b00ead591a5028","blake2_256_digest":"d7154340199a68b36ce8a46ba0e882b0741eb2b2c837e29bd88985d508b95e16","license_files":[]}
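The mkdocs-addresses record above resolves identifier links such as `(--point-here)` into concrete page addresses and flags dead ones. Below is a minimal sketch of that resolution idea, assuming a registry built by scanning rendered pages for `id` attributes with BeautifulSoup; this is a simplification, as the plugin's real logic also covers images, files, `use_directory_urls`, and IDE snippets.

```python
from bs4 import BeautifulSoup

def collect_anchor_ids(pages):
    # pages: {page_url: rendered_html}; returns {identifier: "page_url#id"}.
    registry = {}
    for url, html in pages.items():
        soup = BeautifulSoup(html, "html.parser")
        for el in soup.select("[id]"):
            registry[el["id"]] = f"{url}#{el['id']}"
    return registry

def resolve(href, registry):
    # Rewrite '--identifier' hrefs; an unknown identifier is a dead link.
    if not href.startswith("--"):
        return href
    target = registry.get(href[2:])
    if target is None:
        raise KeyError(f"dead link: {href}")
    return target

pages = {"guide/page.html": '<h2 id="point-here">Very important title</h2>'}
registry = collect_anchor_ids(pages)
assert resolve("--point-here", registry) == "guide/page.html#point-here"
```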
9 | -
{"name":"RepMan","version":"1.0.4","summary":"RepMan: Repository Manager (alias: Project Manager)","description":"\n\n\n\n[](https://ci.appveyor.com/project/d33pster/repman)\n\n# Overview\n\nRepMan or Repository Manager is written on python to serve as GitHub Repo Manager for the end users.\n\nHave a lot of repositories you've been working on? Is it a hassle? RepMan is your solution.\n\n## What RepMan offers\n\n- RepMan will organize all the github repos you have under one management.\n- RepMan will help you find your repositories and update them (add, commit and push the changes.)\n- RepMan will help you start working on your project right away with just one command.\n- More to come.\n\n## Requirements\n\n- python\u003e=3.9\n\n### Note\n\n- Currently supports only Visual Studio Code as the default editor.\n- In MacOS arm64 and Debian Linux aarch64, if vscode and git are not installed, it will be automatically installed using `-i` or `--init` option of RepMan.\n- In other Operating Systems, it is recommended to have Visual Studio Code and Git pre-installed.\n\n- Git Installations\n ```bash\n # for Debian Linux,\n sudo apt install git\n\n # for macOS, use homebrew or other package managers\n\n # for windows, download the windows installer from the git-scm website.\n\n # for installation using homebrew\n brew install git\n ```\n For Other Operating systems, visit the official site of [ [git](https://git-scm.com/downloads) ] to download respective supported versions of git.\n\n- Visual Studio Code Installation\n\n ```bash\n # if you have home brew\n brew install --cask visual-studio-code\n ```\n Else, Go to [ [Visual Studio Code](https://code.visualstudio.com/download) ] to download for your OS.\n\n- Supports all versions of Apple Laptops with Apple Silicon chip and arm64 architecture.\n\n### Git setup Note\n\nGit requires git credential manager to log in to your account so that you can clone private repositories. In windows, it is by default installed while installing git using the gui installer. But in other Operating Systems, it needs to be installed and configured manually.\n\nI'd suggest to use GitHub Cli instead to manage your credentials.\n\n- Installation:\n ```bash\n # using homebrew,\n brew install gh\n\n # or in debian Linux,\n sudo apt install gh\n ```\n- setup\n ```bash\n # run the following command in terminal and follow the steps \n gh auth login\n ```\n\n## Installation\n\nRepMan is very easy to install.\n\n```bash\n# install using pip\n\npip install RepMan\n```\n```bash\n# install by cloning this git repository.\n\ngit clone https://github.com/d33pster/RepMan.git\ncd RepMan\npip install .\n```\n\nAfter installation, run the following command. 
(make sure to install vscode and git if you're not using the supported OS.)\n```bash\nrepman -i\n\n# or\n\nrepman --init\n```\n\n## Usage\n\nFor usage, run\n\n```bash\nrepman -h\n\n# or\n\nrepman --help\n```\n\nFor option specific help, run\n\n```bash\nrepman \u003coption\u003e -h\n\n# or \n\nrepman \u003coption\u003e --help\n```\n\n## Usage screenshots\n\n\u003cimg src='images/update.png'\u003e\n\n## Supported OS and architectures and notes\n- MacOS (Apple Silicon Chip - M series) (Arch - arm64) (Requires Homebrew)\n- Linux (Debian) (Arch - aarch64)\n- **If your OS and arch is not listed here, just make sure to install VSCode and git on your own, rest is same.**\n\n## Uninstall\n\nUninstall using pip\n```bash\npip uninstall RepMan\n```\n","description_content_type":"text/markdown","author_email":"Soumyo Deep Gupta \[email protected]\u003e","maintainer_email":"Soumyo Deep Gupta \[email protected]\u003e","license":"MIT License Copyright (c) 2024 Soumyo Deep Gupta Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ","keywords":"d33pster, repman, RepMan, Repo Man, Repository Manager","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Developers","License :: OSI Approved :: MIT License","Operating System :: OS Independent","Topic :: Software Development"],"platform":[],"requires_python":"\u003e=3.9","requires":[],"provides":[],"obsoletes":[],"requires_dist":["optioner\u003e=1.4.5","termcolor","gdown","tk","pandas","tabulate","requests"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["GitHub","Homepage","Issues"],"uploaded_via":"twine/5.0.0 CPython/3.9.19","upload_time":"2024-04-09 22:53:09.072137 UTC","filename":"RepMan-1.0.4-py3-none-any.whl","size":"14922","path":"7c/07/d99c037e524d620197c7f650cc01e210ba43c8e95bfc722545285fc30e00/RepMan-1.0.4-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"df4bdb1c701b18b72e2af382aece3e18","sha256_digest":"857c1e259e50458dbe5cea9a0dfeda18a406333b64a6e65893ce27da94547500","blake2_256_digest":"7c07d99c037e524d620197c7f650cc01e210ba43c8e95bfc722545285fc30e00","license_files":[]}
1 | + version https://git-lfs.github.com/spec/v1
2 | + oid sha256:5e78fa0d0fc15b4e12d0dadabe4cf53935f419583a64f8eddff845baccf15945
3 | + size 61137904
pypi-packages-metadata-000000000463.json
CHANGED
@@ -1,3 +1,3 @@
1 | version https://git-lfs.github.com/spec/v1
2 | - oid sha256:
3 | - size
2 | + oid sha256:9908541ae230a2d09b9e56bcc430658a8f2f538fa3a5323525b5082ae516c5cb
3 | + size 84419235
pypi-packages-metadata-000000000471.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000501.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000604.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000611.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000677.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000742.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000770.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000836.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000872.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000969.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000000993.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001028.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001029.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001055.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001072.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001081.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001083.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001148.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001181.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001248.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001263.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001324.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001343.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001362.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001363.json
CHANGED
The diff for this file is too large to render.
See raw diff
pypi-packages-metadata-000000001392.json
CHANGED
The diff for this file is too large to render.
See raw diff