Commit d33c005 (verified) by notsahil
Parent(s): 5e2468f

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
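The commit message above refers to huggingface_hub's upload-large-folder tool, which is designed for resumable uploads of folders with many large files. A minimal sketch of how such an upload is typically started, assuming a hypothetical repo id and local folder path (neither is taken from this commit):

```python
# Hedged sketch: start an upload like this one with huggingface_hub's
# upload_large_folder API. The repo id and local path are assumptions.
from huggingface_hub import HfApi

api = HfApi()  # token is resolved from the local login/env if needed
api.upload_large_folder(
    repo_id="example-org/pypi-packages-metadata",  # hypothetical dataset repo
    repo_type="dataset",
    folder_path="./pypi-packages-metadata",        # hypothetical folder of JSON shards
)
```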
Files changed (50)
  1. .gitattributes +25 -0
  2. pypi-packages-metadata-000000000001.json +2 -2
  3. pypi-packages-metadata-000000000032.json +2 -2
  4. pypi-packages-metadata-000000000040.json +0 -0
  5. pypi-packages-metadata-000000000044.json +2 -2
  6. pypi-packages-metadata-000000000045.json +2 -2
  7. pypi-packages-metadata-000000000056.json +2 -2
  8. pypi-packages-metadata-000000000061.json +2 -2
  9. pypi-packages-metadata-000000000065.json +2 -2
  10. pypi-packages-metadata-000000000104.json +2 -2
  11. pypi-packages-metadata-000000000131.json +2 -2
  12. pypi-packages-metadata-000000000160.json +2 -2
  13. pypi-packages-metadata-000000000201.json +2 -2
  14. pypi-packages-metadata-000000000245.json +2 -2
  15. pypi-packages-metadata-000000000255.json +2 -2
  16. pypi-packages-metadata-000000000263.json +2 -2
  17. pypi-packages-metadata-000000000324.json +2 -2
  18. pypi-packages-metadata-000000000325.json +2 -2
  19. pypi-packages-metadata-000000000372.json +0 -0
  20. pypi-packages-metadata-000000000373.json +0 -0
  21. pypi-packages-metadata-000000000388.json +2 -2
  22. pypi-packages-metadata-000000000410.json +2 -2
  23. pypi-packages-metadata-000000000419.json +0 -0
  24. pypi-packages-metadata-000000000456.json +2 -2
  25. pypi-packages-metadata-000000000461.json +0 -0
  26. pypi-packages-metadata-000000000486.json +0 -0
  27. pypi-packages-metadata-000000000491.json +2 -2
  28. pypi-packages-metadata-000000000516.json +0 -0
  29. pypi-packages-metadata-000000000518.json +0 -0
  30. pypi-packages-metadata-000000000542.json +2 -2
  31. pypi-packages-metadata-000000000546.json +3 -4
  32. pypi-packages-metadata-000000000552.json +0 -0
  33. pypi-packages-metadata-000000000557.json +0 -0
  34. pypi-packages-metadata-000000000558.json +0 -0
  35. pypi-packages-metadata-000000000562.json +3 -14
  36. pypi-packages-metadata-000000000591.json +0 -0
  37. pypi-packages-metadata-000000000594.json +0 -0
  38. pypi-packages-metadata-000000000598.json +0 -0
  39. pypi-packages-metadata-000000000600.json +0 -0
  40. pypi-packages-metadata-000000000603.json +0 -0
  41. pypi-packages-metadata-000000000606.json +0 -0
  42. pypi-packages-metadata-000000000608.json +0 -0
  43. pypi-packages-metadata-000000000617.json +0 -0
  44. pypi-packages-metadata-000000000626.json +0 -0
  45. pypi-packages-metadata-000000000627.json +0 -0
  46. pypi-packages-metadata-000000000629.json +0 -0
  47. pypi-packages-metadata-000000000631.json +0 -0
  48. pypi-packages-metadata-000000000632.json +0 -0
  49. pypi-packages-metadata-000000000633.json +0 -0
  50. pypi-packages-metadata-000000000646.json +0 -0
.gitattributes CHANGED
@@ -1716,3 +1716,28 @@ pypi-packages-metadata-000000001430.json filter=lfs diff=lfs merge=lfs -text
  pypi-packages-metadata-000000001405.json filter=lfs diff=lfs merge=lfs -text
  pypi-packages-metadata-000000002107.json filter=lfs diff=lfs merge=lfs -text
  pypi-packages-metadata-000000001366.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001310.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000461.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000516.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000562.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001384.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000373.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001993.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001354.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000552.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000419.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000557.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001387.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000372.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000486.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001433.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001308.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001375.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000040.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001783.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000546.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001436.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001697.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000518.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000001903.json filter=lfs diff=lfs merge=lfs -text
+ pypi-packages-metadata-000000000558.json filter=lfs diff=lfs merge=lfs -text
pypi-packages-metadata-000000000001.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:50b080e07c883ae86e60896c55cc28da0e3eab3b3a6faea3b8f4d9dd31162015
- size 75449005
+ oid sha256:e20e34124bf72efea1333e8120f09bfc286b652606a24f87508d05eac6f964a4
+ size 11450481
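Each of these small diffs swaps one Git LFS pointer file for another: the tracked .json shards live out-of-band, and the repository only stores the three key/value lines shown above (spec version, payload sha256 oid, and byte size). A minimal sketch for reading those fields back out of a pointer file; the parsing is generic, and the file name is simply one of the pointers touched by this commit:

```python
# Hedged sketch: parse a Git LFS pointer file (version / oid / size key-value lines).
def parse_lfs_pointer(path: str) -> dict[str, str]:
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

pointer = parse_lfs_pointer("pypi-packages-metadata-000000000001.json")
print(pointer["oid"])        # e.g. "sha256:e20e3412..." as in the diff above
print(int(pointer["size"]))  # payload size in bytes, e.g. 11450481
```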
pypi-packages-metadata-000000000032.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:707202ff9d566c203a6e92d3a20039c30268d9d2b9b8c9da79cce5be6d4188e4
- size 83103177
+ oid sha256:f2b2a89f3dff8a71aa777ff9c035fbf3830d0baa1fc3107ef6dd21dbd344c272
+ size 48496966
pypi-packages-metadata-000000000040.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000044.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0acff90cae26b897c750d7c3f38096186ade7df53adbe341a6c9dcf54c7b5c89
- size 47979121
+ oid sha256:49037140821e737ec08af176d5ce350aeb400e314fd04a0c33748a414b471109
+ size 45525
pypi-packages-metadata-000000000045.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:27d0732452f56ca235965efc3b6ead02ea6afae3b8f0bfc1042c67a59bf330eb
- size 11956243
+ oid sha256:6c5f495fe5ab40f1398ed7d114e134096901137067d3683ddaf76ede97956942
+ size 16645919
pypi-packages-metadata-000000000056.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6d468fe9eeb9dac5af898f5cbc9c6a29bfea32f8b7603efbeee7f63cb69755db
- size 68722185
+ oid sha256:4db03b666e59f3a3b4e3ec5f0b5386d280bada53fc2bf72c1406f16558e90b87
+ size 81771713
pypi-packages-metadata-000000000061.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:105e96644e7595a874b1fd3fb9160ab27776f22e0d1489f2b28d1e05bd8cf097
- size 78002031
+ oid sha256:42ad23b421a2d0a086c10740d38866e9bb0dcc10d6cd89b5ea27f7edd0548c76
+ size 1094863
pypi-packages-metadata-000000000065.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4bdfc2d43ff0f4633385eff4d9183ed7e26a0f1de89993a90432500b6b39397a
- size 77299841
+ oid sha256:7f43309445dcfabfd44043ac139dc93dce263f0edae3fe20ea9e094f3c60e82e
+ size 73017281
pypi-packages-metadata-000000000104.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:201b9be8d7f44f979653fca53a60e56f6f54908523bdafbd1eb47660a712a5b5
- size 59020076
+ oid sha256:639556e4f8d18f0c8ac23f224d5654ec0b82c621f56002ff1e971b1c0e0a7f5a
+ size 58222542
pypi-packages-metadata-000000000131.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:2276b304d389be1417d8e40beee3de57d03a6582081c17cd377ec3bc94f8858b
- size 56457720
+ oid sha256:bb80775dc9560289cb4c0ceb21f3eafac99f6585530d392992345f3ad9a11e33
+ size 59585109
pypi-packages-metadata-000000000160.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cf932533d17334ac01c2813a0cdf0dd5ac3ec65a07f700509a565078ddf82b3d
- size 60567235
+ oid sha256:7d7ea7ce43b0311073063a524e1dbea0abae92e8276f8cce4ece51ea4cce06a7
+ size 75591649
pypi-packages-metadata-000000000201.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:25969dfa4b3e0ae7463e370de07ce0c4ad78da025ff8f91dcfc8e77ee10bf41c
- size 85848189
+ oid sha256:b2866540cd452739fe98237d686b21c8a0d562f8f5c1d795910356c64a35e7f6
+ size 12112407
pypi-packages-metadata-000000000245.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c5958e38a5726d9295de59cd73a1b0778bef81f061f7cf95f0ca17726d505e68
- size 80225407
+ oid sha256:bc87612349a71701a6e86269ba39eb5ad4bfd5a9751a79cc11e510d8c575ac22
+ size 57421571
pypi-packages-metadata-000000000255.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:24f6dc3ae20171ed5f6f986783d1bfc837cb690cedf0e9c5db8b77b7fdfe622b
- size 71282291
+ oid sha256:3e03a17ba763afae3f4cb76c7987cabeee8ba2503b744a090488c58a1d124826
+ size 87790290
pypi-packages-metadata-000000000263.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1f862fea0c9f9013d772bf221ff8be8619f4a3e7f02e3f14a545bda59fb91cf1
- size 80158078
+ oid sha256:d3731d137d2bca77ca9d8a0f60ad097331ecca1263b9517c348d55b03c51229b
+ size 13767387
pypi-packages-metadata-000000000324.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:fafabb509839da181173f9b4b49ba4b376f09a3572713bf7e29e79e18b29e8ec
- size 89859090
+ oid sha256:d7019689111c7e5133105808b1fca06538b21d639e58d51915907cff3c320df3
+ size 47736307
pypi-packages-metadata-000000000325.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:82eb47cc87bc716d7a27ef197fdc6fa8dbc642a0d1129808c2778cfaf90c52d0
- size 86376108
+ oid sha256:cc9e055e4ac6f8e03d3ebdeb5119e559a1ac65084e135750167f7467c34cd29f
+ size 14042183
pypi-packages-metadata-000000000372.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000373.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000388.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:960223a6313ceae42d23750619541774ef581774dc5851b1aaf6431377efff3d
- size 13225607
+ oid sha256:2bf9ce5dfcf22c8a7a6da2fbd0e2ec6bb5e746b047fed4cda1aaa1fd287191cc
+ size 73148147
pypi-packages-metadata-000000000410.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c6bca331f7fcdb3484c3c5000733a1947906b2b6fbe196f597beb090ec5a99db
- size 19455318
+ oid sha256:52255481e82ac6b5576f440286ff662756952b57552680bdb26d2789dcb4a9ae
+ size 58777991
pypi-packages-metadata-000000000419.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000456.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:dbcbc8609cfe10d92c9f00d6f4d1760d35661cdf6ff08fec247edfacf3a66ec3
- size 10950922
+ oid sha256:c759b997d2ceb5e95551759ad2e0ea37ad4ac79d193d9863fcb3c05fe1c8b53c
+ size 62205324
pypi-packages-metadata-000000000461.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000486.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000491.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:28d578a43f7233c484dca5f3a284122d4f5605cd82fda72cde40d59d8000ab9d
- size 19624718
+ oid sha256:a63446dac2ce9889cf3666d631a764f1989a699af5d13cfff642466a428d8d44
+ size 75329968
pypi-packages-metadata-000000000516.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000518.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000542.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:364ec3d721a1a4e34c30a4dde9cde4115559ae46ab71aee886c155badd6fa56a
- size 16854340
+ oid sha256:af9a1370a65e24266f2cc708658934eb25553293bf4d5edacea963268327e8ff
+ size 35751099
pypi-packages-metadata-000000000546.json CHANGED
@@ -1,4 +1,3 @@
- {"name":"py-lamina","version":"3.0.0","summary":"Add a new layer (\"lâmina\") to AWS lambda functions","description":"# Welcome to Lamina\n\n\u003cp align=\"center\"\u003e\n\u003ca href=\"https://pypi.org/project/py-lamina/\" target=\"_blank\"\u003e\n\u003cimg alt=\"PyPI\" src=\"https://img.shields.io/pypi/v/py-lamina\"/\u003e\u003c/a\u003e\n\u003ca href=\"https://www.python.org\" target=\"_blank\"\u003e\n\u003cimg alt=\"PyPI - Python Version\" src=\"https://img.shields.io/pypi/pyversions/py-lamina\"/\u003e\n\u003c/a\u003e\n\u003c/p\u003e\n\nThis library adds a new layer (\"lâmina\") to AWS lambda functions, integrating synchronous and asynchronous code in a\nsingle function, which use Pydantic models to validate input and output data.\n\n---\n\n### Install\n\n```shell\n$ pip install py-lamina\n```\n\nThis library is compatible with Python 3.9, 3.10 and 3.11.\n\n---\n\n### Usage\n\nCreate the models for Input and Output data:\n\n```python\n# schemas.py\n\nfrom pydantic import BaseModel\n\nclass ExampleInput(BaseModel):\n name: str\n age: int\n\nclass ExampleOutput(BaseModel):\n message: str\n```\n\nCreate your AWS Lambda handler:\n\n```python\n# main.py\nfrom typing import Any, Dict, Tuple, Union\nfrom lamina import lamina, Request\nfrom .schemas import ExampleInput, ExampleOutput\n\n@lamina(schema=ExampleInput, schema_out=ExampleOutput)\ndef handler(request: Request) -\u003e Dict[str, Any]:\n response = {\"message\": f\"Hello {request.data.name}, you are {request.data.age} years old!\"}\n return response\n```\n\nYou can also use an async handler:\n\n```python\n# main.py\nimport asyncio\n\n@lamina(schema=ExampleInput, schema_out=ExampleOutput)\nasync def handler(request: Request) -\u003e Dict[str, Any]:\n await asyncio.sleep(1)\n response = {\"message\": f\"Hello {request.data.name}, you are {request.data.age} years old!\"}\n return response\n```\n\n### The Response Status Code\nDefault value is 200. You can change it by returning a tuple with the response and the status code:\n\n```python\n@lamina(schema=ExampleInput, schema_out=ExampleOutput)\ndef handler(request: Request) -\u003e Tuple[Dict[str, Any], int]:\n response = {\"message\": f\"Hello {request.data.name}, you are {request.data.age} years old!\"}\n return response, 201\n```\n\n### The Response Content Type\nDefault content type is `application/json; charset=utf-8`. 
You can change it by defining the `content_type` parameter:\n\n```python\n@lamina(schema=ExampleInput, content_type=Lamina.HTML)\ndef handler(request: Request) -\u003e Tuple[str, int]:\n html_404 = \"\"\"\n \u003chtml\u003e\n \u003chead\u003e\u003ctitle\u003e404 Not Found\u003c/title\u003e\u003c/head\u003e\n \u003cbody\u003e\n \u003ch1\u003e404 Not Found\u003c/h1\u003e\n \u003c/body\u003e\n \u003c/html\u003e\n \"\"\"\n return html_404, 404\n```\n\n### The Response Headers\nDefault header contains the Content Type defined in decorator or `{\"Content-Type\": \"application/json; charset=utf-8\"}`\nby default.\nYou can add more headers it by returning a dict in the function return tuple:\n\n```python\n@lamina(schema=ExampleInput, content_type=Lamina.HTML)\ndef handler(request: Request) -\u003e str:\n return None, 403, {\"Location\": \"https://www.example.com\"}\n```\n\nThis dict will be merged with the default header.\n\n### The Request object\n\nThe `Request` object has the following attributes:\n* `data`: The input data, already validated by the schema.\n* `event`: The original event received by the lambda function.\n* `context`: The original context received by the lambda function.\n\nYou can use lamina without one or both schemas, if you like:\n\n```python\n# main.py\nimport json\nfrom typing import Any, Dict\n\nfrom lamina import lamina, Request\n\n\n@lamina()\ndef handler(request: Request) -\u003e Dict[str, Any]:\n body = request.event[\"body\"]\n data = json.loads(body)\n response = {\"message\": f\"Hello {data.name}, you are {data.age} years old!\"}\n return response\n```\n\nPlease note, if you do not define a SchemaIn, the `data` attribute will contain the **original** body from event. You\nneed to validate it yourself and convert it to a dict, bytes, etc... you need before using the data received.\n\n---\n\n## License\n\nThis project is licensed under the terms of the MIT license.\n","description_content_type":"text/markdown","author":"Chris Maillefaud","author_email":"[email protected].github.com","license":"MIT","keywords":"aws, lambda, decorator","classifiers":["Development Status :: 4 - Beta","Environment :: Plugins","Intended Audience :: Developers","License :: OSI Approved :: MIT License","Programming Language :: Python :: 3","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12"],"platform":[],"home_page":"https://github.com/megalus/lamina","requires_python":"\u003c4,\u003e=3.11","requires":[],"provides":[],"obsoletes":[],"requires_dist":["asgiref","loguru","pydantic"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Repository"],"uploaded_via":"poetry/1.8.3 CPython/3.12.1 Linux/6.5.0-1025-azure","upload_time":"2024-10-09 22:09:27.970792 UTC","filename":"py_lamina-3.0.0-py3-none-any.whl","size":"5448","path":"cf/88/940db94d9d1ae0f58111baa7e90edaad7660f4f130ea89601762e1e31eeb/py_lamina-3.0.0-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"a884e74b240251ab73ab33c04b24586a","sha256_digest":"b7ed92e3c87713196093a1e1b6a24d4711d80c5c575b69be0e532b3cfd3560ad","blake2_256_digest":"cf88940db94d9d1ae0f58111baa7e90edaad7660f4f130ea89601762e1e31eeb","license_files":[]}
- {"name":"PlayDrissionPage","version":"0.0.2.10","summary":"Playwright and DrissionPage","description_content_type":"text/markdown","author":"xx299x","author_email":"[email protected]","license":"BSD","keywords":"Playwright, DrissionPage","classifiers":["Development Status :: 4 - Beta","License :: OSI Approved :: BSD License","Programming Language :: Python :: 3.6","Topic :: Utilities"],"platform":[],"home_page":"https://gitee.com/xx299x/PlayDrissionPage","requires_python":"\u003e=3.6","requires":[],"provides":[],"obsoletes":[],"requires_dist":["DrissionPage==4.1.0.6","playwright==1.43.0","requests"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":[],"uploaded_via":"twine/5.1.0 CPython/3.11.5","upload_time":"2024-10-26 09:26:49.914328 UTC","filename":"PlayDrissionPage-0.0.2.10.tar.gz","size":"10254","path":"3b/81/4c7eb06c32e934202b37935c3503b9e4f752c820a4f9b5bc658d63012c38/PlayDrissionPage-0.0.2.10.tar.gz","python_version":"source","packagetype":"sdist","has_signature":false,"md5_digest":"664855f03e263f6f04ff64e8530cdd6e","sha256_digest":"7eba8f85b153ecf69bcc4b6dacfa3d3005bf1171fedd63cb673b10617d7cff0f","blake2_256_digest":"3b814c7eb06c32e934202b37935c3503b9e4f752c820a4f9b5bc658d63012c38","license_files":[]}
- {"name":"hasmtools","version":"0.0.3","summary":"Visualize Home Assistant Finite State Machines","description":"# hasmtools\n\nA small tool to help me working with the Finite State Machine Sensor for Home Assistant by\ngenerating a png and converting the definition between JSON and YAML.\n\nSee https://github.com/edalquist/ha_state_machine for the Sensor.\n\n# Features\n\n* write your definition in YAML instead of JSON\n* convert from YAML to JSON and back\n* generate an image of the state machine using graphviz\n\n# Examples\n\n## YAML input file\n\n```yaml\ninitial:\n a: state2\n b: state3\nstate2:\n a: initial\n b: state3\nstate3:\n a: initial\n b: state2\n```\nNote that in yaml the first state is the default/initial state.\n\n## Converted to JSON\n\n```\n $ hasmtool example.yaml example.json\n```\n\n```json\n{\n \"state\": {\n \"status\": \"initial\"\n },\n \"transitions\": {\n \"initial\": {\n \"a\": \"state2\",\n \"b\": \"state3\"\n },\n \"state2\": {\n \"a\": \"initial\",\n \"b\": \"state3\"\n },\n \"state3\": {\n \"a\": \"initial\",\n \"b\": \"state2\"\n }\n }\n}\n```\n\n## Finally converted to PNG\n\n```\n $ hasmtool example.json example.png\n```\n\n![State Machine rendered as png](https://github.com/mutax/hasmtools/raw/main/example.png)\n\n## An example with timeouts\n\nsource: https://github.com/edalquist/ha_state_machine/blob/main/example.json\n\n```json\n{\n \"state\": {\n \"status\": \"IDLE\"\n },\n \"transitions\": {\n \"IDLE\": {\n \"above\": \"STARTING\"\n },\n \"STARTING\": {\n \"timeout\": { \"after\": 6, \"to\": \"RUNNING\" },\n \"below\": \"IDLE\"\n },\n \"RUNNING\": {\n \"below\": \"STOPPING\"\n },\n \"STOPPING\": {\n \"timeout\": { \"after\": 15, \"to\": \"DONE\" },\n \"above\": \"RUNNING\",\n \"middle\": \"RUNNING\"\n },\n \"DONE\": {\n \"timeout\": { \"after\": 15, \"to\": \"IDLE\" }\n }\n }\n}\n```\n\n![FSM with timeouts](https://github.com/mutax/hasmtools/raw/main/timeouts.png)\n\n\n\n# Status\n\nWorking MVP.\n\n# Why Did I Do This?\n\nIt started with the desire to have a visualization of the FSM, then I realised I like yaml more than json for it, so one thing led to another...\n","description_content_type":"text/markdown","author_email":"Florian Streibelt \[email protected]\u003e","license":"Apache-2.0","keywords":"HomeAssistant, FSM","classifiers":["Development Status :: 4 - Beta","Programming Language :: Python :: 3"],"platform":[],"requires_python":"\u003e=3.11","requires":[],"provides":[],"obsoletes":[],"requires_dist":["click\u003e=8.1.7","pydot\u003e=3.0.3","PyYAML\u003e=6.0.2"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Changelog","Documentation","Source"],"uploaded_via":"twine/5.1.1 CPython/3.12.7","upload_time":"2024-12-09 03:40:40.166929 UTC","filename":"hasmtools-0.0.3-py3-none-any.whl","size":"10003","path":"07/21/59b0ba73c200f3ae36f5f632d7028b3ad55d72288d69cf413bb5e7bc80c5/hasmtools-0.0.3-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"e7d1f518d4601610bd42e07998fff2e8","sha256_digest":"52f395258b46830d434d1d114f7eb810020b966339c829c1be9738a234ceb1a4","blake2_256_digest":"072159b0ba73c200f3ae36f5f632d7028b3ad55d72288d69cf413bb5e7bc80c5","license_files":[]}
- {"name":"gluex.hddm-r","version":"1.0.8","summary":"methods for reading and writing GlueX reconstructed event data","description":"# gluex.hddm\\_r - i/o library for reading and writing reconstructed events from the GlueX detector\n\nThe gluex.hddm\\_r module is a python wrapper around the c++ library that implements reading \nand writing of reconstructed events from the GlueX detector, based on the HDDM event i/o\nframework. Every hddm\\_r file consists of a plain-text header describing the structure\nof the event data contained in the file in xml format known as a hddm template. After\nthe header follows compressed binary data describing the sequence of reconstructed events\ncontained in the file. All files with valid hddm\\_r events share a compatible template\nindicated by the class=\"s\" attribute in the first line of the file header. All such\nfiles should be readable by this module if they are compliant with the HDDM standard.\nFor more details on the standard, see https://github.com/rjones30/HDDM.\n\nFor details on the hddm\\_r API, install gluex.hddm\\_r and type \"pydoc gluex.hddm\\_r\".\nHere is a quickstart example of an analysis tool that reads from hddm\\_r input files.\n\n\tfrom gluex import hddm_r\n\tfor rec in hddm_r.istream(\"http://nod25.phys.uconn.edu:2880/Gluex/simulation\" +\n\t \"/simsamples/particle_gun-v5.2.0/particle_gun001_019_rest.hddm\"):\n\t for pe in rec.getPhysicsEvents():\n\t print(f\"http streaming reader found run {pe.runNo}, event {pe.eventNo}\")\n\t\n\tfor rec in hddm_r.istream(\"https://nod25.phys.uconn.edu:2843/Gluex/simulation\" +\n\t \"/simsamples/particle_gun-v5.2.0/particle_gun001_019_rest.hddm\"):\n\t for pe in rec.getPhysicsEvents():\n\t print(f\"https streaming reader found run {pe.runNo}, event {pe.eventNo}\")\n\t\n\tfor rec in hddm_r.istream(\"root://nod25.phys.uconn.edu/Gluex/simulation\" +\n\t \"/simsamples/particle_gun-v5.2.0/particle_gun001_019_rest.hddm\"):\n\t for pe in rec.getPhysicsEvents():\n\t print(f\"xrootd streaming reader run {pe.runNo}, event {pe.eventNo}\")\n\t\n","description_content_type":"text/markdown","author":"Richard T. Jones","author_email":"\"Richard T. Jones\" \[email protected]\u003e","maintainer_email":"\"Richard T. Jones\" \[email protected]\u003e","license":"MIT License","keywords":"hddm, GlueX, Gluex, gluex, hdgeant4, hdgeant","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Programming Language :: Python","Topic :: Scientific/Engineering :: Physics"],"platform":[],"home_page":"https://github.com/rjones30/hddm_r","requires_python":"\u003e=3.6","requires":[],"provides":[],"obsoletes":[],"requires_dist":[],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Changelog","Documentation","Homepage","Issues","Repository"],"uploaded_via":"twine/5.1.0 CPython/3.12.4","upload_time":"2024-06-25 22:06:07.255365 UTC","filename":"gluex.hddm_r-1.0.8-cp39-cp39-win32.whl","size":"1270629","path":"81/e5/4c51693a8fda1000890457cfbf3d5f38982c78951271f3811b12f527e871/gluex.hddm_r-1.0.8-cp39-cp39-win32.whl","python_version":"cp39","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"43ae156fc7ab551e7ac15d02f798aec3","sha256_digest":"403afe7fc9ac7db8b43f3afb9982f0b335de6cd9011a47ca638b75b86215ebfc","blake2_256_digest":"81e54c51693a8fda1000890457cfbf3d5f38982c78951271f3811b12f527e871","license_files":[]}
 
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3ea3b69448b1643bb3961c3c3a0f1d1bb2e9cc835727b45e540f0dcb886337b1
+ size 64959028
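The removed lines above show what these shards contain before they are converted to LFS pointers: newline-delimited JSON, one PyPI release record per line with fields such as name, version, requires_dist, and sha256_digest. A minimal sketch for streaming such a shard once the real payload has been fetched (for example with git lfs pull); the file name is one of the files in this commit, and reading it as JSON Lines is an assumption based on the records shown above:

```python
import json

# Hedged sketch: iterate a metadata shard as JSON Lines, one PyPI release per line.
with open("pypi-packages-metadata-000000000546.json", encoding="utf-8") as f:
    for line in f:
        if not line.strip():
            continue  # skip blank lines defensively
        record = json.loads(line)
        print(record["name"], record["version"], record["sha256_digest"])
```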
pypi-packages-metadata-000000000552.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000557.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000558.json CHANGED
The diff for this file is too large to render. See raw diff
 
pypi-packages-metadata-000000000562.json CHANGED
@@ -1,14 +1,3 @@
- {"name":"mkdocs-asciinema-player","version":"0.7.1","summary":"A Mkdocs Plugin to include asciinema player in your documentation.","description":"# mkdocs-asciinema-player\n\n[![documentation](https://img.shields.io/badge/docs-mkdocs--asciinema--player-blue.svg?style=flat)](https://pa-decarvalho.github.io/mkdocs-asciinema-player/)\n[![test](https://github.com/pa-decarvalho/mkdocs-asciinema-player/workflows/test/badge.svg)](https://github.com/pa-decarvalho/mkdocs-asciinema-player/actions)\n[![pypi](https://img.shields.io/pypi/v/mkdocs-asciinema-player.svg)](https://pypi.org/project/mkdocs-asciinema-player/)\n[![downloads](https://img.shields.io/pypi/dm/mkdocs-asciinema-player.svg)](https://pypi.org/project/mkdocs-asciinema-player/)\n\nA Mkdocs Plugin to include asciinema player in your documentation.\n\n## Quick Setup\n\n### Installation\n\nInstall the plugin via pip :\n\n```sh\npip install mkdocs-asciinema-player\n```\n\n### Configure\n\nIn your `mkdocs.yml`, add `asciinema-player` to the `plugins` section :\n\n```yaml\nplugins:\n - search\n - asciinema-player\n```\n\n### Usage\n\nIn your MkDocs docs folder, add any [asciinema](https://asciinema.org/) `.cast` file and add this to any markdown page :\n\n````markdown\n```asciinema-player\n{\n \"file\": \"assets/asciinema/bootstrap.cast\"\n}\n```\n````\n\n### More\n\nFor more information on the plugin, you can visit the the following [website](https://pa-decarvalho.github.io/mkdocs-asciinema-player/)\n\nYou will find the complete documentation of the plugin, examples of use, installation and configuration instructions.\n","description_content_type":"text/markdown","author_email":"Philippe-André De Carvalho \[email protected]\u003e","license":"MIT License Copyright (c) 2023 Philippe-André De Carvalho Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
","keywords":"mkdocs, documentation, asciinema","classifiers":["Development Status :: 3 - Alpha","Environment :: Console","Intended Audience :: Developers","Intended Audience :: Information Technology","Intended Audience :: System Administrators","Natural Language :: English","Operating System :: POSIX","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.9","Topic :: Software Development :: Build Tools","Topic :: Software Development :: Documentation","Topic :: Utilities"],"platform":[],"requires_python":"\u003e=3.9","requires":[],"provides":[],"obsoletes":[],"requires_dist":["mkdocs\u003e=1.5.3","asciinema==2.4.0; extra == \"dev\"","commitizen==3.13.0; extra == \"dev\"","bandit==1.7.5; extra == \"lint\"","djlint==1.34.0; extra == \"lint\"","flake8==6.0.0; extra == \"lint\"","mypy==1.7.1; extra == \"lint\"","pylint==3.0.2; extra == \"lint\"","yamllint==1.32.0; extra == \"lint\"","pytest==7.4.0; extra == \"test\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Documentation","Issues","Source"],"uploaded_via":"twine/5.0.0 CPython/3.12.2","upload_time":"2024-03-22 09:36:55.995901 UTC","filename":"mkdocs-asciinema-player-0.7.1.tar.gz","size":"79353","path":"be/1f/d1fcd760d492412d5d7ece32eab127d957eaab7cf178492309cba9756280/mkdocs-asciinema-player-0.7.1.tar.gz","python_version":"source","packagetype":"sdist","has_signature":false,"md5_digest":"a085d4363e24e2108df836161c5ff7b4","sha256_digest":"391f6d0b1826106301cbac607577a5fb74f965c8121dd87af5591cae5f0d7c3f","blake2_256_digest":"be1fd1fcd760d492412d5d7ece32eab127d957eaab7cf178492309cba9756280","license_files":[]}
- {"name":"tach","version":"0.10.7","summary":"A Python tool to maintain a modular package architecture.","description":"\n# Tach\n\n[![downloads](https://static.pepy.tech/badge/tach/month)](https://pepy.tech/project/tach)\n[![version](https://img.shields.io/pypi/v/tach.svg)](https://pypi.Python.org/pypi/tach)\n[![license](https://img.shields.io/pypi/l/tach.svg)](https://pypi.Python.org/pypi/tach)\n[![python](https://img.shields.io/pypi/pyversions/tach.svg)](https://pypi.Python.org/pypi/tach)\n[![ci](https://github.com/gauge-sh/tach/actions/workflows/ci.yml/badge.svg)](https://github.com/gauge-sh/tach/actions/workflows/ci.yml)\n[![pyright](https://microsoft.github.io/pyright/img/pyright_badge.svg)](https://microsoft.github.io/pyright/)\n[![ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)\n\na Python tool to enforce dependencies, written in Rust. Inspired by modular monolithic architecture.\n\n[Docs](https://docs.gauge.sh)\n\n[Discord](https://discord.gg/Kz2TnszerR)\n\n\u003cdiv align=\"center\"\u003e\n \u003cimg src=\"docs/assets/light_logo.png\" alt=\"gauge-logo\" width=\"30%\" style=\"border-radius: 50%; padding-bottom: 20px\"/\u003e\n\u003c/div\u003e\n\nTach lets you define and enforce dependencies between Python modules within your project.\n\nHere's an example:\n\n![tach_demo](https://github.com/gauge-sh/tach/assets/10570340/6fc1e9b4-5a78-4330-a329-9187bd9c374d)\n\nIf a module tries to import from another module that is not listed as a dependency, Tach can prevent it.\n\nTach is:\n- 🌎 Open source\n- 🐍 Installable via pip\n- 🔧 Able to be adopted incrementally\n- ⚡ Implemented with no runtime impact\n- ♾️ Interoperable with your existing systems (cli, hooks, ci, etc.)\n\n## Getting Started\n\n### Installation\n\n```bash\npip install tach\n```\n\n### Setup\n\nTach allows you to configure where you want to place module boundaries in your project.\n\nYou can do this interactively - run:\n\n```bash\n tach mod\n# Up/Down: Navigate Enter: Mark/unmark module Right: Expand Left: Collapse Ctrl + Up: Jump to parent\n# Ctrl + s: Exit and save Ctrl + c: Exit without saving Ctrl + a: Mark/unmark all\n```\n\nMark each module boundary with 'Enter'. You can mark all of your top-level Python source packages, or just a few which you want to isolate.\n\nIf your Python code lives below your project root, or if you are working in a monorepo with multiple Python packages, mark your Python [source roots](https://docs.gauge.sh/usage/configuration#source-roots) using the 's' key.\n\nThis will create the config file for your project, `tach.toml`.\n\nOnce you've marked all the modules you want to enforce dependencies between, run:\n\n```bash\ntach sync\n```\n\nDependencies that exist between each module you've marked will be written to `tach.toml`.\n\nCheck out what Tach has found!\n\n```\ncat tach.toml\n```\n\nNote: Your [source roots](https://docs.gauge.sh/usage/configuration#source-roots) will implicitly be treated as module boundaries, and can show up as `\u003croot\u003e`.\n\n### Enforcement\n\nTach comes with a cli command to enforce the boundaries that you just set up! From the root of your Python project, run:\n\n```bash\ntach check\n```\n\nYou will see:\n\n```bash\n✅ All module dependencies validated!\n```\n\nYou can validate that Tach is working by either:\n\n1. Commenting out an item in a `depends_on` key in `tach.toml`\n2. 
By adding an import between modules that didn't previously import from each other.\n\nGive both a try and run `tach check` again. This will generate an error:\n\n```bash\n❌ tach/check.py[L8]: Cannot import 'tach.filesystem'. Module 'tach' cannot depend on 'tach.filesystem'.\n```\n\nEach error indicates an import which violates your dependencies. If your terminal supports hyperlinks, click on the file path to go directly to the error.\n\nWhen an error is detected, `tach check` will exit with a non-zero code. It can be easily integrated with CI/CD, [Pre-commit hooks](https://docs.gauge.sh/usage/commands#tach-install), and [VS Code](https://marketplace.visualstudio.com/items?itemName=Gauge.tach), and more!\n\n### Extras\n\nVisualize your dependency graph.\n\n```bash\ntach show [--web]\n```\n\nTach will generate a graph of your dependencies. Here's what this looks like for Tach:\n\n![tach show](docs/assets/tach_show.png)\n\nNote that this graph is generated remotely with the contents of your `tach.toml` when running `tach show --web`.\n\nIf you would like to use the [GraphViz DOT format](https://graphviz.org/about/) locally, simply running `tach show` will generate `tach_module_graph.dot` in your working directory.\n\nYou can view the dependencies and usages for a given path:\n\n```bash\ntach report my_package/\n# OR\ntach report my_module.py\n```\n\ne.g.:\n\n```bash\n\u003e tach report python/tach/filesystem\n[Dependencies of 'python/tach/filesystem']\npython/tach/filesystem/install.py[L6]: Import 'tach.hooks.build_pre_commit_hook_content'\npython/tach/filesystem/project.py[L5]: Import 'tach.constants.CONFIG_FILE_NAME'\n...\n-------------------------------\n[Usages of 'python/tach/filesystem']\npython/tach/cache/access.py[L8]: Import 'tach.filesystem.find_project_config_root'\npython/tach/cache/setup.py[L7]: Import 'tach.filesystem.find_project_config_root'\n...\n```\n\nTach also supports:\n\n- [Strict public interfaces for modules](https://docs.gauge.sh/usage/strict-mode/)\n- [Deprecating individual dependencies](https://docs.gauge.sh/usage/deprecate)\n- [Manual file configuration](https://docs.gauge.sh/usage/configuration)\n- [Monorepos and namespace packages](https://docs.gauge.sh/usage/configuration#source-roots)\n- [Inline exceptions](https://docs.gauge.sh/usage/tach-ignore)\n- [Pre-commit hooks](https://docs.gauge.sh/usage/commands#tach-install)\n\n\nMore info in the [docs](https://docs.gauge.sh/). 
Tach logs anonymized usage statistics which can be [opted out](https://docs.gauge.sh/usage/faq/) of.\nIf you have any feedback, we'd love to talk!\n\nIf you have any questions or run into any issues, let us know by either reaching out on [Discord](https://discord.gg/Kz2TnszerR) or submitting a [Github Issue](https://github.com/gauge-sh/tach/issues)!\n\n---\n\n### Contributors\n\n\u003ca href=\"https://github.com/gauge-sh/tach/graphs/contributors\"\u003e\n \u003cimg src=\"https://contrib.rocks/image?repo=gauge-sh/tach\" /\u003e\n\u003c/a\u003e\n\n","description_content_type":"text/markdown; charset=UTF-8; variant=GFM","author_email":"Caelean Barnes \[email protected]\u003e, Evan Doyle \[email protected]\u003e","keywords":"python, module, package, guard, enforcement, boundary, enforcer, domain, architecture","classifiers":["Development Status :: 4 - Beta","Environment :: Console","Intended Audience :: Developers","License :: OSI Approved :: MIT License","Operating System :: OS Independent","Programming Language :: Python","Programming Language :: Python :: 3","Programming Language :: Python :: 3 :: Only","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.13","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Software Development :: Libraries :: Python Modules","Topic :: Software Development :: Quality Assurance"],"platform":[],"requires_python":"\u003e=3.7","requires":[],"provides":[],"obsoletes":[],"requires_dist":["pyyaml~=6.0","tomli-w~=1.0","pydantic~=2.0","rich~=13.0","prompt-toolkit~=3.0","gitpython~=3.1","networkx~=3.0; python_version \u003e \"3.7\"","networkx\u003c4.0,\u003e=2.6; python_version == \"3.7\"","pydot~=2.0","stdlib-list\u003e=0.10.0; python_version \u003c \"3.10\"","eval-type-backport\u003e=0.2.0; python_version \u003c \"3.10\" and python_version \u003e \"3.7\"","importlib-metadata\u003e=6.5; python_version == \"3.7\"","pydantic==2.8.2; python_version \u003e \"3.7\" and extra == \"dev\"","rich==13.8.0; extra == \"dev\"","prompt-toolkit==3.0.47; extra == \"dev\"","pip==24.0; extra == \"dev\"","pyright==1.1.378; extra == \"dev\"","ruff==0.6.3; extra == \"dev\"","setuptools==69.5.1; python_version \u003e \"3.7\" and extra == \"dev\"","twine==5.1.1; python_version \u003e \"3.7\" and extra == \"dev\"","build==1.2.1; python_version \u003e \"3.7\" and extra == \"dev\"","pytest==8.2.2; python_version \u003e \"3.7\" and extra == \"dev\"","pytest-mock==3.14.0; python_version \u003e \"3.7\" and extra == \"dev\"","coverage==7.6.0; python_version \u003e \"3.7\" and extra == \"dev\"","maturin==1.7.1; extra == \"dev\"","pydantic==2.5.3; python_version == \"3.7\" and extra == \"dev\"","setuptools==47.1.0; python_version == \"3.7\" and extra == \"dev\"","twine==4.0.2; python_version == \"3.7\" and extra == \"dev\"","build==1.1.1; python_version == \"3.7\" and extra == \"dev\"","pytest==7.4.4; python_version == \"3.7\" and extra == \"dev\"","pytest-mock==3.11.1; python_version == \"3.7\" and extra == \"dev\"","coverage==7.2.7; python_version == \"3.7\" and extra == \"dev\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Homepage","Issues"],"uploaded_via":"maturin/1.7.1","upload_time":"2024-09-09 05:03:32.583186 
UTC","filename":"tach-0.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl","size":"2555883","path":"0e/ff/92d636c9a57fd9e35a8e668a8b2c5abfb0afcfb73e576ed1bbdcee05005c/tach-0.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl","python_version":"cp38","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"3d5db74004cdf981231aea43d498088d","sha256_digest":"7ed0e4080a59a9dd5166b2289fcd98c2a6edefd30091fd5c2fac1e944ae5cee4","blake2_256_digest":"0eff92d636c9a57fd9e35a8e668a8b2c5abfb0afcfb73e576ed1bbdcee05005c","license_files":[]}
- {"name":"chromatic_tda","version":"1.1.2","summary":"Tool to compute six-packs of persistence diagrams for chromatic point clouds","description":"# chromatic_tda\n\n`chromatic_tda` is a package for computing six-packs of persistent diagrams of colored point clouds. Currently point clouds in R^2 with 2 or 3 colors are supported (see Future).\n\n# Installation\n\n## Install with `pip`\n\nThe package is uploaded to [PyPI](https://pypi.org/project/chromatic-tda/), so it can be installed with `pip`.\n\nRun `pip install chromatic_tda`.\n\n## Install from [github project](https://github.com/OnDraganov/chromatic-tda)\n\nThe project uses `poetry` for Python dependecy management, which allows you to easily install all you need to run the package:\n\n- Clone the repository\n- Install `poetry`\n- Go to the repository folder in terminal\n- Run `poetry shell`\n- Run `poetry install`\n- [optional] Run `poetry run pytest` to check that the code runs\n\n# How to use\n\nThe basic use of the package is as follows:\n```\nimport chromatic_tda as chro\npoints, labels = ... # load points, labels\nchro_alpha = chro.ChromaticAlphaComplex(points, labels) simplicial_complex = chro_alpha.get_simplicial_complex(\n sub_complex=‘bi-chromatic’\n full_complex=‘all’\n relative=‘mono-chromatic’\n) # these options make sense for three colors; for two use, e.g., just sub_complex='mono-chromatic'\nsix_pack = simplicial_complex.bars_six_pack()\nchro.plot_six_pack(six_pack)\n```\n\nFor more details check the docstrings of the methods and the jupyter notebook file `manual` (in [github repo](https://github.com/OnDraganov/chromatic-tda)). For more background on the theory, check the resources listed below.\n\n\n# Resources\n\nThe code is based on research done at Institute of Science and Technology by Ranita Biswas, Sebastiano Cultrera di Montesano, Ondřej Draganov, Herbert Edelsbrunner and Morteza Saghafian. A draft write up can be found on [arxiv](https://arxiv.org/abs/2212.03128).\n\nA presentation about the main concepts used in this package can be viewed on YouTube: [AATRN Online Seminar: TDA for Chromatic Point Clouds](https://youtu.be/HIqiF00yKaw). A recording of a complementary talk focusing more on the motivations of the work and the combinatorial structures underlying it is accessible through [google drive](https://drive.google.com/file/d/1RBiGlgY4mlRL59eAVmMLrgBCZYN97QnZ/view).\n\n# Contact\n\nIf you have any questions to the code, do not hesitate to contat us. We are also eager to hear from you if you try the code out, and happy to chat about how you can use it on your data. Use, e.g., the mail in my [github profile](https://github.com/OnDraganov).\n\n# Timing\n\nFor a rough idea on the performance, see the two plots below showing times the code took on my laptop.\n\u003cimg src=\"img/timing_1.1.1.png\" width=\"500px\"\u003e\n\u003cimg src=\"img/timing_2d_2c_200exp.png\" width=\"500px\"\u003e\n# License\n\nCopyright ©2023. Institute of Science and Technology Austria (IST Austria). All Rights Reserved. \n\nThis file is part of chromatic_tda, which is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or any later version.\n \nThis program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Affero General Public License for more details.\n \nYou should have received a copy of the GNU Affero General Public License along with this program. If not, see \u003chttps://www.gnu.org/licenses/\u003e.\n \nContact the Technology Transfer Office, ISTA, Am Campus 1, A-3400 Klosterneuburg, Austria, +43-(0)2243 9000, [email protected], for commercial licensing opportunities.\n","description_content_type":"text/markdown","author":"Ondrej Draganov","author_email":"[email protected]","license":"AGPL-3.0-or-later","keywords":"tda, topological data analysis, chromatic, chromatic alpha, delaunay, persistence, persistent homology","classifiers":["License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12"],"platform":[],"home_page":"https://github.com/OnDraganov/chromatic-tda","requires_python":"\u003e=3.10","requires":[],"provides":[],"obsoletes":[],"requires_dist":["numpy\u003c3.0.0,\u003e=2.0.0","matplotlib\u003c4.0.0,\u003e=3.8.0","scipy\u003c2.0.0,\u003e=1.14.0","mypy\u003c2.0.0,\u003e=1.3.0","typing_extensions\u003c5.0.0,\u003e=4.11.0","pytest\u003c9.0.0,\u003e=8.1.1"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Documentation","Repository"],"uploaded_via":"poetry/1.8.2 CPython/3.12.3 Darwin/23.5.0","upload_time":"2024-08-06 14:53:11.224021 UTC","filename":"chromatic_tda-1.1.2-py3-none-any.whl","size":"78849","path":"3c/2c/cfaa4dbe191b14eeafb724ca77cdb18a3d57f0619e726ecf1ac71c2dcbdd/chromatic_tda-1.1.2-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"822d3fa0aa9a33e7f80414b656f64ac4","sha256_digest":"a29fcb2c86b4b8c81627817f369d38e9b23efd1a2aa99bfac3bf85c9e101a52f","blake2_256_digest":"3c2ccfaa4dbe191b14eeafb724ca77cdb18a3d57f0619e726ecf1ac71c2dcbdd","license_files":[]}
- {"metadata_version":"2.2","name":"scope-client","version":"1.4.317","summary":"Arthur Python API Client Library","description":"[//]: # (user-facing readme)\n\n# Arthur API Client\n[Arthur](https://www.arthur.ai/) is the platform for centralized monitoring of production models. \nWe help data scientists, product owners, and business leaders accelerate model operations to optimize \nfor accuracy, explainability, and fairness. As a model- and infrastructure-agnostic platform, Arthur \nadds a layer of intelligence to your AI stack and scales with your deployments.\n\nOur API Client makes it easy to integrate your models with the Arthur platform. For help getting started or \nusing the SDK, check out [our documentation](https://docs.arthur.ai/).\n","description_content_type":"text/markdown","author_email":"Arthur \[email protected]\u003e","license":"MIT","keywords":"api arthur client ArthurAI sdk ml model monitoring","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Developers","Topic :: Software Development :: Libraries :: Application Frameworks","Operating System :: OS Independent","License :: OSI Approved :: MIT License","Programming Language :: Python :: 3.9","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.13"],"platform":[],"requires_python":"\u003e=3.9","requires":[],"provides":[],"obsoletes":[],"requires_dist":["urllib3\u003c2.1.0,\u003e=1.25.3","python-dateutil","pydantic\u003e=2","typing-extensions\u003e=4.7.1","simple_settings\u003e=1.2.0","Authlib\u003c2,\u003e=1.3.2","click\u003c9,\u003e=8.1","requests\u003c3,\u003e=2"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Arthur Homepage, https://arthur.ai","Documentation, https://docs.arthur.ai"],"uploaded_via":"twine/6.0.1 CPython/3.13.0","upload_time":"2025-01-19 20:07:35.997999 UTC","filename":"scope_client-1.4.317.tar.gz","size":"136242","path":"07/86/e5dc87065c028e71d2f42a77c6bf1b86ee5268334bb5e996740fe01da06e/scope_client-1.4.317.tar.gz","python_version":"source","packagetype":"sdist","has_signature":false,"md5_digest":"690e5770034c9641c6da43ed6c5b21e5","sha256_digest":"f833ef795993e2648670b8f9f0c12170c58b87d72f2b224f6bbf7186343f970b","blake2_256_digest":"0786e5dc87065c028e71d2f42a77c6bf1b86ee5268334bb5e996740fe01da06e","license_files":[]}
- {"metadata_version":"2.1","name":"Topsis-Nikhil-Garg-102203275","version":"1.0.1","summary":"A Python package for performing TOPSIS analysis.","description":"# TOPSIS Python Package\r\n\r\nThis package implements the TOPSIS (Technique for Order of Preference by Similarity to Ideal Solution) method for multi-criteria decision-making.\r\n\r\n## Installation\r\n\r\nInstall the package using pip:\r\npip install Topsis-\u003cNikhil_Garg\u003e-\u003c102203275\u003e\r\n\r\n## Usage\r\n\r\nRun the following command after importing the package:\r\n\r\n```python\r\nfrom topsis import Topsis\r\n\r\nTopsis.calculate('input_file.csv', '1,1,1,2', '+,+,-,+', 'output_file.csv')\r\n","description_content_type":"text/markdown","author":"Nikhil_Garg","author_email":"[email protected]","classifiers":["Programming Language :: Python :: 3","License :: OSI Approved :: MIT License","Operating System :: OS Independent"],"platform":[],"home_page":"https://github.com/Bluebird5757/Topsis_Nikhil_Garg_102203275","requires_python":"\u003e=3.6","requires":[],"provides":[],"obsoletes":[],"requires_dist":["pandas","numpy","openpyxl"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":[],"uploaded_via":"twine/6.0.1 CPython/3.10.9","upload_time":"2025-01-19 19:56:16.687515 UTC","filename":"Topsis_Nikhil_Garg_102203275-1.0.1-py3-none-any.whl","size":"2931","path":"96/c0/15d3e91127165f93d6ca7e4dde16d00361dc006619e53a30bdc386a6ab22/Topsis_Nikhil_Garg_102203275-1.0.1-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"7d3df7af47a223a6547803cfb98d4088","sha256_digest":"578352819f3defdabd138f6649837c39f085e18903a7b9acbc894c67f942d114","blake2_256_digest":"96c015d3e91127165f93d6ca7e4dde16d00361dc006619e53a30bdc386a6ab22","license_files":[]}
- {"name":"aiida-workgraph","version":"0.3.2","summary":"Design flexible node-based workflow for AiiDA calculation.","description":"# AiiDA-WorkGraph\n[![PyPI version](https://badge.fury.io/py/aiida-workgraph.svg)](https://badge.fury.io/py/aiida-workgraph)\n[![Unit test](https://github.com/superstar54/aiida-workgraph/actions/workflows/ci.yaml/badge.svg)](https://github.com/superstar54/aiida-workgraph/actions/workflows/ci.yaml)\n[![codecov](https://codecov.io/gh/superstar54/aiida-workgraph/branch/main/graph/badge.svg)](https://codecov.io/gh/superstar54/aiida-workgraph)\n[![Docs status](https://readthedocs.org/projects/aiida-workgraph/badge)](http://aiida-workgraph.readthedocs.io/)\n\nEfficiently design and manage flexible workflows with AiiDA, featuring an interactive GUI, checkpoints, provenance tracking, and remote execution capabilities.\n\n\n\nHere is a detailed comparison between the ``WorkGraph`` with two AiiDA built-in workflow components.\n\n\n| Aspect | WorkFunction | WorkChain | WorkGraph |\n| ------------------------ | ---------------------- | ----------------------------- | ---------------------- |\n| Use Case | Short-running jobs | Long-running jobs | Long-running jobs |\n| Checkpointing | ``No`` | Yes | Yes |\n| Execution order | ``Sequential`` | ``Hybrid Sequential-Parallel``| Directed Acyclic Graph |\n| Non-blocking | ``No`` | Yes | Yes |\n| Implementation | Easy | ``Difficult`` | Easy |\n| Dynamic | ``No`` | ``No`` | Yes |\n| Ready to Use | Yes | ``Need PYTHONPATH`` | Yes |\n| Subprocesses Handling | ``No`` | Launches \u0026 waits | Launches \u0026 waits |\n| Flow Control | All | `if`, `while` | `if`, `while`, `match` |\n| Termination | ``Hard exit`` | ExitCode | ExitCode |\n| Data Passing | Direct passing | Context | Link \u0026 Context |\n| Output Recording | Limited support | Out \u0026 validates | Out |\n| Port Exposing | Limited support | Manual \u0026 automatic | Manual |\n\n\n\n## Installation\n\n```console\n pip install aiida-workgraph\n```\n\nTo install the latest version from source, first clone the repository and then install using `pip`:\n\n```console\ngit clone https://github.com/superstar54/aiida-workgraph\npip install -e aiida-workgraph\n# build widget\ncd aiida_workgraph/widget/\nnpm install\nnpm run build\n# build web frontend\ncd ../../aiida_workgraph/web/frontend/\nnpm install\nnpm run build\n```\n\n## Documentation\nExplore the comprehensive [documentation](https://aiida-workgraph.readthedocs.io/en/latest/) to discover all the features and capabilities of AiiDA Workgraph.\n\n## Demo\nVisit the [Workgraph Collections repository](https://github.com/superstar54/workgraph-collections) to see demonstrations of how to utilize AiiDA Workgraph for different computational codes.\n\n## Examples\nSuppose we want to calculate ```(x + y) * z ``` in two steps. 
First, add `x` and `y`, then multiply the result with `z`.\n\n```python\nfrom aiida.engine import calcfunction\nfrom aiida_workgraph import WorkGraph\n\n# define add calcfunction\n@calcfunction\ndef add(x, y):\n return x + y\n\n# define multiply calcfunction\n@calcfunction\ndef multiply(x, y):\n return x*y\n\n# Create a workgraph to link the tasks.\nwg = WorkGraph(\"test_add_multiply\")\nwg.tasks.new(add, name=\"add1\")\nwg.tasks.new(multiply, name=\"multiply1\")\nwg.links.new(wg.tasks[\"add1\"].outputs[\"result\"], wg.tasks[\"multiply1\"].inputs[\"x\"])\n\n```\n\nPrepare inputs and submit the workflow:\n\n```python\nfrom aiida import load_profile\n\nload_profile()\n\nwg.submit(inputs = {\"add1\": {\"x\": 2, \"y\": 3}, \"multiply1\": {\"y\": 4}}, wait=True)\nprint(\"Result of multiply1 is\", wg.tasks[\"multiply1\"].outputs[0].value)\n```\n\nStart the web app, open a terminal and run:\n```console\nworkgraph web start\n```\n\nThen visit the page http://127.0.0.1:8000/workgraph, you should find a `first_workflow` Worktree, click the pk and view the WorkGraph.\n\n\u003cimg src=\"docs/source/_static/images/first-workflow.png\" /\u003e\n\n\nOne can also generate the node graph from the process:\n```console\nverdi node generate pk\n```\n\n\u003cimg src=\"docs/source/_static/images/add_multiply.png\"/\u003e\n\n\n## Development\n\n### Pre-commit and Tests\nTo contribute to this repository, please enable pre-commit so the code in commits are conform to the standards.\n```console\npip install -e .[tests, pre-commit]\npre-commit install\n```\n\n### Widget\nSee the [README.md](https://github.com/superstar54/aiida-workgraph/blob/main/aiida_workgraph/widget/README.md)\n\n### Web app\nSee the [README.md](https://github.com/superstar54/aiida-workgraph/blob/main/aiida_workgraph/web/README.md)\n\n\n## License\n[MIT](http://opensource.org/licenses/MIT)\n\n","description_content_type":"text/markdown","author_email":"Xing Wang \[email protected]\u003e","keywords":"aiida, workflows","classifiers":["Development Status :: 1 - Planning","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering"],"platform":[],"requires_python":"\u003e=3.9","requires":[],"provides":[],"obsoletes":[],"requires_dist":["numpy~=1.21","node-graph\u003e=0.0.6","anywidget\u003e=0.9.11","aiida-core\u003e=2.3","cloudpickle","aiida-pseudo","aiida-quantumespresso","aiida-shell","fastapi","uvicorn","sphinx_rtd_theme; extra == \"docs\"","sphinx~=7.2; extra == \"docs\"","sphinx-copybutton~=0.5.0; extra == \"docs\"","sphinx-design~=0.5.0; extra == \"docs\"","sphinx-notfound-page~=1.0; extra == \"docs\"","sphinxext-rediraffe~=0.2.4; extra == \"docs\"","sphinx-intl~=2.1.0; extra == \"docs\"","myst-nb~=1.0.0; extra == \"docs\"","nbsphinx; extra == \"docs\"","pre-commit~=2.2; extra == \"pre-commit\"","pylint~=2.17.4; extra == \"pre-commit\"","pytest~=7.0; extra == \"tests\"","pytest-cov\u003c2.11,~=2.7; extra == \"tests\"","playwright; extra == \"tests\"","httpx; extra == \"tests\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Documentation","Source"],"uploaded_via":"twine/5.1.1 CPython/3.9.19","upload_time":"2024-06-26 20:06:33.830628 
UTC","filename":"aiida_workgraph-0.3.2-py3-none-any.whl","size":"1090801","path":"de/0f/e22091c3f022ab07c37de50e78610f53205dfd71c1f8a65ee5dac61d3844/aiida_workgraph-0.3.2-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"3f8dc74559e643c03b4e467ec70be396","sha256_digest":"d1e93aa9047cc83e91bde8a7dc4767c227d3d931dadd01d856881d0a6e473f43","blake2_256_digest":"de0fe22091c3f022ab07c37de50e78610f53205dfd71c1f8a65ee5dac61d3844","license_files":[]}
- {"name":"uniprot_lb_poo","version":"0.1.8","summary":"Parses uniprot entries (from uniprot files) and creates Uniprot objects","description":"\n# uniprot_lb_poo\n\n`uniprot_lb_poo` est un package Python qui permet d'analyser et manipuler des données issues de fichiers Uniprot. Il offre des outils pour extraire des informations biologiques, calculer des propriétés biochimiques, et visualiser des caractéristiques des protéines.\n\nCe projet a été développé dans le cadre de l'UE Programmation Avancée Python du Master 1 Bio-Informatique à l'Université Claude Bernard - Lyon 1.\n\n## Structure du package\n\n- **`uniprot.py`** : Contient la classe `Uniprot` qui permet d'analyser des fichiers Uniprot individuels.\n- **`uniprot_collection.py`** : Définit la classe `Collection`, conçue pour manipuler et analyser un ensemble d'objets `Uniprot`.\n- **Images `.png`** : Générées pour visualiser l'abondance relative des acides aminés (ABRL) des entrées Uniprot.\n\n## Fonctionnalités\n\n### Classe `Uniprot`\n- **Extraction d'informations** : Accession number, nom du gène, organisme, séquence peptidique, IDs GO.\n- **Export au format FASTA** : Génère un fichier `.fasta` basé sur les informations de la protéine. Ce fichier est enregistré dans le répertoire `fasta_outputs` (créé s'il n'existe pas déjà).\n- **Calcul de propriétés** :\n - Poids moléculaire.\n - Hydrophobicité moyenne.\n\n### Classe `Collection`\n- **Organisation de données** : Permet de regrouper plusieurs objets `Uniprot` pour des analyses globales.\n- **Méthodes principales** :\n - Tri des protéines par longueur de séquence.\n - Filtrage basé sur l'hydrophobicité.\n - Analyse des IDs GO présents dans le groupe.\n - Génération de graphiques spécifiques (ABRL).\n\n## Utilisation\n\n1. **Installation** :\n\n Installez le package avec pip :\n ```\n pip install uniprot_lb_poo\n ```\n\n2. 
**Visualisation des graphiques** :\n Les graphiques générés seront enregistrés sous forme de fichiers `.png` dans le répertoire `figures` (créé s'il n'existe pas déjà).\n\n## Pré-requis\n\n- Python 3.10 ou plus récent.\n- Bibliothèques Python :\n - `matplotlib\u003e=3.9.2`\n\n## Exemples rapides\n\n### Test simple\n```python\nfrom uniprot_lb_poo import hello\n\nhello.main()\n# Hello from uniprot_lb_poo !\n\n```\n\n### Objets Uniprot\n```python\nfrom uniprot_lb_poo import uniprot\n\n# Création d'un objet Uniprot à partir d'un fichier\nuniprot_obj = uniprot.uniprot_from_file(\"your/file/path.txt\")\n\n# Affichage des informations\nprint(uniprot_obj)\n\n# Calcul de propriétés\nprint(\"Poids moléculaire :\", uniprot_obj.molecular_weight())\nprint(\"Hydrophobicité moyenne :\", uniprot_obj.average_hydrophobicity())\n\n# Export au format FASTA\nuniprot_obj.fasta_dump()\n```\n\n### Objets Collections\n```python\nimport uniprot_lb_poo\nfrom uniprot_lb_poo import uniprot_collection as collection\nfrom functools import partial\n\n# Création d'un objet Collection à partir d'un fichier contenant plusieurs entrées.\ncollection_1 = collection.collection_from_file(\"your/file/path/multiple_entries.txt\")\n\n# Affichage des informations\nprint(collection_1)\n\n# Ajout de données dans une collection\ncollection_2 = collection.collection_from_file(\"your/file/path/multiple_entries.txt\")\n\nwith open(\"your/file/path/single_entry.txt\", \"r\") as f:\n file_contents = f.read()\n collection_1.add(file_contents)\n\n# Retrait d'éléments dans la collection\ncollection_1.del_(\"PGBM_HUMAN\")\ncollection_2.del_(\"SPRC_BOVIN\")\n\n# Fusion de deux collections\ncollection_3 = collection_2 + collection_1\n\n# Filtrage des objets pour lesquels la séquence peptidique est supérieure à 1000 acides aminés:\nprint(collection_3.filter(partial(collection.filtre_longueur, n=1000)))\n\n# Création d'un plot pour chacun des objets de collection_3\nfor element in collection_3:\n collection_3.draw_ABRL(element.id)\n```\n\n## Auteurs-Etudiants\n\n- Baptiste Riou\n- Lorcan Brenders\n\n## Contributeurs-Professeurs\n\n- Arnaud Mary\n- Guillaume Launay\n","description_content_type":"text/markdown","author_email":"Baptiste RIOU \u0026 Lorcan BRENDERS \[email protected]\u003e","maintainer_email":"Guillaume LAUNAY \u0026 Arnaud MARY \[email protected]\u003e","keywords":"Bioinformatics, Parser, Protein, Uniprot","classifiers":[],"platform":[],"requires_python":"\u003e=3.10","requires":[],"provides":[],"obsoletes":[],"requires_dist":["matplotlib\u003e=3.9.2"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["DOWNLOAD","HomePage"],"uploaded_via":"uv/0.5.4","upload_time":"2024-11-28 08:46:07.728178 UTC","filename":"uniprot_lb_poo-0.1.8.tar.gz","size":"6589","path":"7e/53/bf0a3c745202bec58efc94e7649869b7bb872e71ae369c9293677da0c7c3/uniprot_lb_poo-0.1.8.tar.gz","python_version":"source","packagetype":"sdist","has_signature":false,"md5_digest":"f5a73c622a1dfd0752081b4b73b90e21","sha256_digest":"56f227d66daf51e1c8262fc8ac661a87505675f6b8820a7b95c04837e78fbdf4","blake2_256_digest":"7e53bf0a3c745202bec58efc94e7649869b7bb872e71ae369c9293677da0c7c3","license_files":[]}
- {"name":"bencode-cpp","version":"0.0.4a1","summary":"A fast and correct bencode serialize/deserialize library","description":"# bencode-c\n\nbencode serialize/deserialize written with c++, with pybind11.\n\n```shell\npip install bencode-cpp\n```\n\n```python\nimport bencode_cpp\n\n# NOTICE: we decode bencode bytes to bytes, not str.\nassert bencode_cpp.bdecode(b'd5:hello5:worlde') == {b'hello': b'world'}\n\nassert bencode_cpp.bencode(...) == b'...'\n```\n","description_content_type":"text/markdown","author_email":"trim21 \[email protected]\u003e","license":"MIT","keywords":"bencode, bittorrent, bit-torrent, serialize, deserialize, p2p","classifiers":["Development Status :: 2 - Pre-Alpha","License :: OSI Approved :: MIT License","Operating System :: OS Independent","Programming Language :: C++","Programming Language :: Python","Programming Language :: Python :: 3 :: Only","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9"],"platform":[],"requires_python":"\u003c4.0,\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":[],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":[],"uploaded_via":"twine/5.1.0 CPython/3.10.12","upload_time":"2024-06-03 09:48:58.644408 UTC","filename":"bencode_cpp-0.0.4a1-cp38-cp38-musllinux_1_2_x86_64.whl","size":"1149236","path":"48/10/7e250d467546fcc120d14849855bd026cab7fa8ca1f1844e2b8db9928fbf/bencode_cpp-0.0.4a1-cp38-cp38-musllinux_1_2_x86_64.whl","python_version":"cp38","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"60798b87f5e3c612478d005d74bfb5dc","sha256_digest":"c6f9162e068735bc487814fddd725aad2be98389dd071b4f41c0397ab74306b0","blake2_256_digest":"48107e250d467546fcc120d14849855bd026cab7fa8ca1f1844e2b8db9928fbf","license_files":[]}
- {"name":"loopstructuralvisualisation","version":"0.1.3","summary":"3D geological modelling","description":"# loopstructural-visualisation\n\nA LoopStructural interface for pyvista's Plotter class. \n\n\nTo install `pip install loopstructuralvisualisation` or for a jupyter notebook environment (including the pyvista[jupyuter] dependencies) `pip install loopstructuralvisualisation[jupyter]`\n\n","description_content_type":"text/markdown","author_email":"Lachlan Grose \[email protected]\u003e","license":"MIT","keywords":"earth sciences, geology, 3-D modelling, structural geology, uncertainty","classifiers":["Development Status :: 5 - Production/Stable","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Operating System :: MacOS","Operating System :: Microsoft :: Windows","Operating System :: POSIX","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering :: Information Analysis"],"platform":[],"requires_python":"\u003e=3.8","requires":[],"provides":[],"obsoletes":[],"requires_dist":["numpy\u003e=1.18","pyvista\u003e=0.42","LoopStructural","pyvista[all]; extra == \"all\"","pyvista[jupyter]; extra == \"jupyter\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Bug Tracker","Documentation","Source Code"],"uploaded_via":"twine/5.0.0 CPython/3.12.3","upload_time":"2024-05-31 00:39:56.880553 UTC","filename":"loopstructuralvisualisation-0.1.3-py3-none-any.whl","size":"6129","path":"c6/de/7ad59289bf75b576df7345b2b6c388eea08b0ee0d732036a3ee3a32d54f1/loopstructuralvisualisation-0.1.3-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"10c1cf65b77e08a319dcf81b74004dab","sha256_digest":"bf3c5fafcd1a1d3cf47b75557585de11169e405d95f9e9660df6d0101df12acd","blake2_256_digest":"c6de7ad59289bf75b576df7345b2b6c388eea08b0ee0d732036a3ee3a32d54f1","license_files":[]}
- {"name":"pybuildingenergy","version":"1.0.7","summary":"Energy Simulation of building usin ISO52016 and more","description":"================\npyBuildingEnergy\n================\n\n\n.. image:: https://github.com/EURAC-EEBgroup/pyBuildingEnergy/blob/master/src/pybuildingenergy/assets/Logo_pyBuild.png\n :width: 800\n :height: 250\n\nCitation\n----------\nPlease cite us if you use the library\n\n.. image:: https://zenodo.org/badge/761715706.svg\n :target: https://zenodo.org/doi/10.5281/zenodo.10887919\n\nFeatures\n--------\n\nThe new EPBD recast provides an update on building performance assessment through a methodology that must take into account various aspects such as the thermal characteristics of the building, the use of energy from renewable sources, building automation and control systems, ventilation, cooling, energy recovery, etc.\n\nThe methodology should represent the actual operating conditions, allow for the use of measured energy for accuracy and comparability purposes, and be based on hourly or sub-hourly intervals that take into account the variable conditions significantly impacting the operation and performance of the system, as well as internal conditions.\nThe energy performance of a building shall be expressed by a numeric indicator of primary energy use per unit of reference floor area per year, in kWh/(m2.y) for the purpose of both energy performance certification and compliance with minimum energy performance requirements. Numeric indicators of final energy use per unit of reference floor area per year, in kWh/(m2.y) and of energy needs according to ISO 52000 in kWh/(m².y) shall be used. The methodology applied for the determination of the energy performance of a building shall be transparent and open to innovation and reflect best practices, in particular from additional indicators.\nMember States shall describe their national calculation methodology based on Annex A of the key European standards on energy performance of buildings, namely EN ISO 52000-1, EN ISO 52003-1, EN ISO 52010-1,EN ISO 52016-1, EN ISO 52018-1,EN 16798-1, EN 52120-1 and EN 17423 or superseding documents. This provision shall not constitute a legal codification of those standards.\n\n**pyBuildingEnergy** aims to provide an assessment of building performance both in terms of energy and comfort. In this initial release, it is possible to assess the energy performance of the building using ISO 52106-1:2018. 
Additional modules will be added for a more comprehensive evaluation of performance, assessing ventilation, renewable energies, systems, etc.\nThe actual calculation methods for the assessment of building performance are the following:\n\n- [x] the (sensible) energy need for heating and cooling, based on hourly or monthly calculations;\n\n- [ ] the latent energy need for (de-)humidification, based on hourly or monthly calculations;\n\n- [x] the internal temperature, based on hourly calculations;\n\n- [x] the sensible heating and cooling load, based on hourly calculations;\n\n- [ ] the moisture and latent heat load for (de-)humidification, based on hourly calculations;\n\n- [ ] the design sensible heating or cooling load and design latent heat load using an hourly calculation interval;\n\n- [ ] the conditions of the supply air to provide the necessary humidification and dehumidification.\n\nThe calculation methods can be used for residential or non-residential buildings, or a part of it, referred to as \"the building\" or the \"assessed object\".\nISO 52016-1:2018 also contains specifications for the assessment of thermal zones in the building or in the part of a building. The calculations are performed per thermal zone. In the calculations, the thermal zones can be assumed to be thermally coupled or not.\nISO 52016-1:2018 is applicable to buildings at the design stage, to new buildings after construction and to existing buildings in the use phase\n\n\nWeather Data\n------------\nThe tool can use wather data coming from 2 main sources:\n\n- pvgis api (https://re.jrc.ec.europa.eu/pvg_tools/en/) - PHOTOVOLTAIC GEOGRAPHICAL INFORMATION SYSTEM\n- .epw file from https://www.ladybug.tools/epwmap/\n\nMore details in the example folder\n\n\nDomestic Hot Water - DHW\n------------------------\n- [x] Calculation of volume and energy need for domestic hot water according to ISO 12831-3. \n- [] Assessment of thermal load based on the type of DHW system\n\n\nLimitations\n------------\nThe library is developed with the intent of demonstrating specific elements of calculation procedures in the relevant standards. It is not intended to replace the regulations but to complement them, as the latter are essential for understanding the calculation. \nThis library is meant to be used for demonstration and testing purposes and is therefore provided as open source, without protection against misuse or inappropriate use.\n\nThe information and views set out in this document are those of the authors and do not necessarily reflect the official opinion of the European Union. Neither the European Union institutions and bodies nor any person acting on their behalf may be held responsible for the use that may be made of the information contained herein.\n\nThe calculation is currently aimed at single-zone buildings with ground floor. The evaluation of multi-zone buildings is under evaluation.\n\nGetting Started\n----------------\nThe following command will install the latest pyBuildinEnergy library\n\n::\n\n pip install pybuildingenergy\n\n\nThe tool allows you to evaluate the performance of buildings in different ways: \n\n* by running simulations of buildings (archetypes) already preloaded in the archetypes.pickle file for different nations according to Tabula dataset (currently only Italian buildings are available, but buildings from different nations will be loaded), \n\n ::\n\n python3 pybuildingenergy --archetype\n\n\nHere it is possible, to select two options:\n \n\n . 
Selection of archetype by providing\n \n * information on building type: single_family_house \n * period of construction: before 1900, 1901-1920,1921-1945,1946-1960,1961-1875,1976-1990,1991-2005,2006-today \n * location: \n **latitude** and **longitude**\n\n . Demo Building having these features: \n\n * single_family_house\n * before 1900,\n * city: Turin\n * lat: 45.071321703968124\n * long: 7.642963669564985\n \n\n* by running best_test600 demo:\n\n ::\n\n python3 pybuildingenergy --best_test\n\n\n* your own building. For the latter, you can either upload the information from scratch or preload the information from a building archetype and then edit only the information you know.\n \n See `Examples \u003chttps://github.com/EURAC-EEBgroup/pyBuildingEnergy/tree/master/examples\u003e`_ folder\n \n\n\nBuilding Inputs\n----------------\n\n.. list-table:: Building geometry data * general\n :widths: 20 20 40 10 10 \n :header-rows: 1\n\n * - Parameter\n - Key\n - Description\n - Unit\n - Mandatory\n * - **Latitude**\n - latitude\n - latitude of the building in [decimal] \n - [-]\n - YES\n * - **Longitude**\n - longitude\n - longitude of the building location [decimal]\n - [-].\n - YES\n * - **Coldest month**\n - coldest_month\n - Define the coldest month of the building location. Value from 1 (January) to 12 (December)\n - [-].\n - YES. Default: 1\n * - **Gross building area**\n - a_use\n - gross floor area of the building\n - [m2]\n - YES\n * - **Slab on ground area** \n - slab_on_ground_area\n - Ground floor gross area\n - [m2]\n - If not provided, the slab on ground are is calculated as useful area / number of floors\n * - **Number of floors**\n - number_of_floor\n - Number of building floors \n - [-]\n - YES/NO if number of floors is provided\n * - **Building perimeter**\n - exposed_perimeter\n - perimeter of the building\n - [m]\n - YES/NO iIf not provided, the perimeter is calculated as if the building were rectangular with one side being 10 meters\n * - **Building height**\n - height\n - external height of the building\n - [m]\n - YES\n * - **Average thickness of wall**\n - wall_thickness\n - average thickness of building walls \n - [m]\n - YES\n * - **Surface of envelope**\n - surface_envelope\n - gross volume of the building \n - [m3]\n - If not provided the volume is calcuated as the slab on ground area * building height\n * - **Volume**\n - volume\n - gross volume of the building \n - [m3]\n - If not provided the volume is calcuated as the slab on ground area * building height\n * - **Annual mean internal temperature**\n - annual_mean_internal_temperature\n - the annual mean internal temperature is the average between Heating and Cooling setpoints\n - [°C]\n - NO: if not provided, it is calculated.\n * - **Annual mean external temperature**\n - annual_mean_external_temperature\n - Annual mean axternal temperature of the building location\n - [°C]\n - NO: if not provided, it is calculated.\n * - **Heating system**\n - heating_mode\n - True if heating system is installed, False if not.\n - [True or False]\n - YES\n * - **Cooling system**\n - cooling-mode\n - True if heating system is installed, False if not.\n - [True or False]\n - YES \n * - **Heating setpoint**\n - heating_setpoint\n - Temperature set-point of the heating system\n - [°C]\n - YES. If `heating_mode` is True\n * - **Cooling setpoint**\n - cooling_setpoint\n - Temperature set-point of the cooling system\n - [°C]\n - YES. 
If `cooling_mode` is True\n * - **Heating setback**\n - heating_setback\n - Temperature set-back of the heating system\n - [°C]\n - YES. If `heating_mode` is True\n * - **Cooling setback**\n - cooling_setback\n - Temperature set-back of the cooling system\n - [°C]\n - YES. If `cooling_mode` is True\n * - **Max power of heating generator**\n - power_heating_max\n - max power of heating generator\n - [W]\n - YES. If `heating_mode` is True\n * - **Max power of cooling generator**\n - power_cooling_max\n - max power of cooling generator\n - [W]\n - YES. If `cooling_mode` is True\n * - **Air change rate**\n - air_change_rate_base_value\n - value of air chnage rate\n - [m3/h*m2]\n - Yes\n * - **Air change rate extra**\n - air_change_rate_extra\n - extra value of air change rate, in specific period according to the occupancy profile\n - [m3/h*m2]\n - Yes\n * - **Internal Gains**\n - internal_gains_base_value\n - power of internal gains \n - [W/m2] \n - YES\n * - **Extra Internal Gains**\n - internal_gains_base_value\n - extra value of internal gains, in specific period according to the occupancy profile\n - [W/m2] \n - YES\n * - **Thermal bridges**\n - thermal_bridge_heat\n - Overall heat transfer coefficient for thermal bridges (without groud floor)\n - [W/K] \n - YES\n * - **Thermal resistance of floor**\n - thermal_resistance_floor\n - Average thermal resistance of internal floors\n - [m2K/W] \n - YES\n * - **Facade elements type**\n - typology_elements\n - List of all facade elements (Walls, Roof, Ground Floor, Windows).For:\n * Wall, Roof use: \"OP\" (Opaque elements)\n * Ground Floor: use \"GF\" (Ground Floor)\n * Windows: use \"W\" (Windows)\n - [-] \n - YES\n * - **Orienation of facade elements**\n - orientation_elements\n - For each elements of the facade provide the orientation, according to the following abbreviations:\n * NV: North Vertical\n * SV: South Vertical\n * EV: East Vertical\n * WV: West Vertical\n * HOR: Horizontal/Slope (for roof and ground floor)\n - [-] \n - YES\n * - **Solar absorption coefficients**\n - solar_abs_elements\n - Solar absorption coefficient of external (Opaque) facade elements (e.g. walls)\n - [-] \n - YES\n * - **Area of facade elements**\n - area_elements\n - Area of each facade element (e.g. Wall, Window, etc.)\n - [m2] \n - YES\n * - **Transmittance - U**\n - transmittance_U_elements\n - Transmiattance of each facade element.\n - [W/m2K] \n - YES\n * - **Resistance - U**\n - thermal_resistance_R_elements\n - Theraml Resistance of each facade element. \n - [W/m2K] \n - YES\n * - **Thermal capacity - k**\n - thermal_resistance_R_elements\n - Addition of the heat capacity of each layer (i.e. 
calculated by multiplying the density times its thickness times the SHC of the material)\n - [J/m2K] \n - YES\n * - **g-value**\n - g_factor_windows\n - solar energy transmittance of windows\n - [-] \n - YES\n * - **Heat radiative transfer coefficient - internal**\n - heat_convective_elements_internal\n - convective heat transfer coefficient internal surface for each element\n - [W/m2K] \n - YES\n * - **Heat convective transfer coefficient - external**\n - heat_convective_elements_external\n - convective heat transfer coefficient external surface for each element\n - [W/m2K] \n - YES\n * - **Heat radiative transfer coefficient - internal**\n - heat_radiative_elements_internal\n - radiative heat transfer coefficient internal surface for each element\n - [W/m2K] \n - YES\n * - **Heat radiative transfer coefficient - external**\n - heat_radiative_elements_external\n - radiative heat transfer coefficient external surface for each element\n - [W/m2K] \n - YES\n * - **View factor**\n - sky_factor_elements\n - View factor between building element and the sky\n - [-] \n - YES\n * - **Occupancy profile workdays - internal_gains rate**\n - comf_level_we\n - Occupancy profile for workdays to evalaute the utilization of extra internal gains\n - [-] \n - YES\n * - **Occupancy profile weekends - internal_gains rate**\n - comf_level_we\n - Occupancy profile for weekdays to evalaute the utilization of extra internal gains\n - [-] \n - YES\n * - **Occupancy profile workdays - airflow rate**\n - comf_level_we\n - Occupancy profile for workdays to evalaute the utilization of extra air change rate\n - [-] \n - YES\n * - **Occupancy profile weekend - airflow rate**\n - comf_level_we\n - Occupancy profile for weekend to evalaute the utilization of extra air change rate\n - [-] \n - YES\n * - **Class of buidling construction**\n - construction_class\n - Distribution of the mass for opaque elements (vertical - walls and horizontal - floor/roof) as described in Table B.13 of ISO52016. Possible choices: class_i, class_e, class_ie, class_d\n - [-] \n - YES\n * - **Weather source**\n - weather_source\n - In English, it would be: \"Select which type of source to use for weather data. Choose 'pvgis' for connecting to the `pvgis \u003chttps://re.jrc.ec.europa.eu/pvg_tools/en/\u003e` or 'epw' file if using an epw file, to be download from `here \u003chttps://www.ladybug.tools/epwmap/\u003e`\n - [-] \n - YES\n \nMore information about coefficients are available `here \u003chttps://github.com/EURAC-EEBgroup/pyBuildingEnergy/tree/master/src/pybuildingenergy/data\u003e`\n\n\nDocumentation\n--------------\nCheck our doc `here \u003chttps://pybuildingenergy.readthedocs.io/en/latest/\u003e`\n\nExample\n-------\n\nHere some `Examples \u003chttps://github.com/EURAC-EEBgroup/pyBuildingEnergy/tree/master/examples\u003e` on pybuildingenergy application.\nFor more information\n.....\n \n\nContributing and Support\n-------------------------\n\n**Bug reports/Questions**\nIf you encounter a bug, kindly create a GitLab issue detailing the bug. \nPlease provide steps to reproduce the issue and ideally, include a snippet of code that triggers the bug. \nIf the bug results in an error, include the traceback. If it leads to unexpected behavior, specify the expected behavior.\n\n**Code contributions**\nWe welcome and deeply appreciate contributions! Every contribution, no matter how small, makes a difference. 
Click here to find out more about contributing to the project.\n\n\nLicense\n--------\n* Free software: MIT license\n* Documentation: https://pybuildingenergy.readthedocs.io.\n\nAcknowledgment\n---------------\nThis work was carried out within European projects: \nInfinite - This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement No 958397, \nModerate - Horizon Europe research and innovation programme under grant agreement No 101069834, \nwith the aim of contributing to the development of open products useful for defining plausible scenarios for the decarbonization of the built environment\n\nReagrding the DHW Calculation: \nThe work was developed using the regulations and results obtained from the spreadsheet created by the EPBCenter.\n\nReference\n----------\n- EN ISO 52010-1:2018 Energy performance of buildings - External climatic conditions - Part 1: Conversion of climatic data for energy calculations\n- EN ISO 52016-1:2018 Energy performance of buildings - Energy needs for heating and cooling, internal temperatures and sensible and latent heat loads \n- EN ISO 12831-3:2018 Energy performance of buildings - Method for calculation of the design heat load - Part 3: Domestic hot water systems heat load and characterisation of needs, Module M8-2, M8-3\n\n\n\n\n","description_content_type":"text/x-rst","author":"Daniele Antonucci","author_email":"Daniele Antonucci \[email protected]\u003e","maintainer_email":"Daniele Antonucci \[email protected]\u003e","license":"MIT License Copyright (c) 2024, Daniele Antonucci Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
","keywords":"energy, building, heating and cooling, 52016, EPBD","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Developers","Intended Audience :: Education","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Operating System :: Microsoft :: Windows","Operating System :: POSIX","Operating System :: Unix","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering","Topic :: Software Development :: Build Tools"],"platform":[],"home_page":"https://github.com/EURAC-EEBgroup/pyBuildingEnergy","requires_python":"\u003e=3.8.0","requires":[],"provides":[],"obsoletes":[],"requires_dist":["bump2version==0.5.11","coverage==4.5.4","pandas\u003e=2.0.3","numpy==1.23.2","DateTime==5.1","timezonefinder==6.2.0","requests==2.31.0","tqdm==4.66.1","Pyarrow==13.0.0","pyecharts==2.0.4","geopy==2.4.1","pvlib==0.10.1","scikit-learn\u003e=1.3.2","urllib3==1.26.18","pyswarms==1.3.0","workalendar==17.0.0","plotly==5.21.0","setuptools; extra == \"testing\"","pytest; extra == \"testing\"","pytest-cov; extra == \"testing\""],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Changelog","Documentation","Homepage","Issues","Repository"],"uploaded_via":"twine/1.14.0 pkginfo/1.9.6 requests/2.31.0 setuptools/69.2.0 requests-toolbelt/1.0.0 tqdm/4.66.1 CPython/3.11.6","upload_time":"2024-04-18 14:31:07.672023 UTC","filename":"pybuildingenergy-1.0.7-py3-none-any.whl","size":"46955","path":"61/40/59147e899c271355cc7eb2fcc8abe9cae16e7ed5a2aee442ae6e7b0e3765/pybuildingenergy-1.0.7-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"3a43985b0fe8a8adfc8ab47284145286","sha256_digest":"0994fb1d6630ad5158959c7cc8b9b3dbceae9e63c59c383aee7fa557d69452ee","blake2_256_digest":"614059147e899c271355cc7eb2fcc8abe9cae16e7ed5a2aee442ae6e7b0e3765","license_files":[]}
- {"name":"irie","version":"0.0.6","summary":"Infrastructure Resilience Engine","description":"\u003cdiv align=\"center\"\u003e\n\u003ch4\u003e\u003ccode\u003eirie\u003c/code\u003e\u003c/h4\u003e\n\u003chr\u003e\n\u003cspan style=\"font-size: 150;\"\u003e\u003ci\u003e\u003cb\u003ei\u003c/b\u003enfrastructure \u003cb\u003er\u003c/b\u003ees\u003cb\u003ei\u003c/b\u003elience \u003cb\u003ee\u003c/b\u003engine\u003c/i\u003e\u003c/span\u003e\n\u003chr\u003e\n\u003ch4\u003ePEER\u003c/h4\u003e\n\u003c/div\u003e\n\n### Powered By\n\n- [`opensees`](https://pypi.org/project/opensees)\n- [`quakeio`](https://pypi.org/project/quakeio)\n- [`mdof`](https://pypi.org/project/mdof)\n- [`sees`](https://pypi.org/project/sees)\n- [`sdof`](https://pypi.org/project/sdof)\n\n","description_content_type":"text/markdown","author_email":"wd \[email protected]\u003e","keywords":"visualization, seismic, opensees, resilience, post-processing, finite-element-analysis, glTF","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Developers","Intended Audience :: Science/Research","License :: OSI Approved :: BSD License","Operating System :: MacOS","Operating System :: Microsoft :: Windows","Operating System :: POSIX","Operating System :: Unix","Programming Language :: C","Programming Language :: Python","Programming Language :: Python :: 3","Programming Language :: Python :: 3 :: Only","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering","Topic :: Software Development"],"platform":[],"requires":[],"provides":[],"obsoletes":[],"requires_dist":["opensees","quakeio","mdof","bottle","openbim","folium","pandas","numpy","orjson","jsonschema","beautifulsoup4","autopep8","Django==5.1.2","asgiref==3.8.1","gunicorn==23.0.0","pytz==2021.3","sqlparse==0.5.1","toml\u003e=0.10.2","whitenoise==5.3.0","crispy-bootstrap5","django-filter","django-crispy-forms","django-environ==0.11.2","djangorestframework==3.15.2","djangorestframework-simplejwt==5.3.1","dj-database-url==2.1.0"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Documentation","Repository"],"uploaded_via":"twine/6.0.1 CPython/3.12.7","upload_time":"2024-12-10 00:52:35.245181 UTC","filename":"irie-0.0.6-py3-none-any.whl","size":"25274101","path":"e0/60/0e3796c853295dde9bbd686fadeafc287086b69a2a1b7425e15c4c8a7ba1/irie-0.0.6-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"773a7bacde29eb5dbfff38d4302738e4","sha256_digest":"794f10e090790a428adf37aaad2ca55058a3a78fe84ee8fa40aa86b62db8f300","blake2_256_digest":"e0600e3796c853295dde9bbd686fadeafc287086b69a2a1b7425e15c4c8a7ba1","license_files":[]}
- {"name":"pdf-essentials","version":"0.0.1","summary":"A lightweight Python library for easy access to essential pdf functions","description":"# pdf-essentials\nA lightweight Python library for easy access to essential pdf functions\n","description_content_type":"text/markdown","author":"Oren Halvani","keywords":"anonymizing, chunking, highlighting, metadata, page-numbers, pdf-manipulation","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Developers","License :: OSI Approved :: MIT License","Operating System :: OS Independent","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.9"],"platform":[],"requires_python":"\u003e=3.9","requires":[],"provides":[],"obsoletes":[],"requires_dist":["fitz==0.0.1.dev2","pikepdf==9.1.1","pypdf2==3.0.1","reportlab==4.2.2","webcolors==1.13"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Bug Tracker","Homepage"],"uploaded_via":"twine/5.1.1 CPython/3.9.19","upload_time":"2024-08-12 21:38:06.389612 UTC","filename":"pdf_essentials-0.0.1.tar.gz","size":"37224","path":"30/2b/c1597c9eda9a7d795444dae9d06baa77a49be7ddc9006a1b969433d4f696/pdf_essentials-0.0.1.tar.gz","python_version":"source","packagetype":"sdist","has_signature":false,"md5_digest":"6457bff40cd91d8e7bffe03695da033c","sha256_digest":"31ce5d2624c5cef81435d9eb2bc485a76121b5e6f18360b427cf36ecd593ad73","blake2_256_digest":"302bc1597c9eda9a7d795444dae9d06baa77a49be7ddc9006a1b969433d4f696","license_files":[]}
- {"name":"modelbase","version":"1.56.3","summary":"A package to build metabolic models","description":"# modelbase\n\n[![DOI](https://img.shields.io/badge/DOI-10.1186%2Fs12859--021--04122--7-blue)](https://doi.org/10.1186/s12859-021-04122-7)\n[![pipeline status](https://gitlab.com/qtb-hhu/modelbase-software/badges/main/pipeline.svg)](https://gitlab.com/qtb-hhu/modelbase-software/-/commits/main)\n[![coverage report](https://gitlab.com/qtb-hhu/modelbase-software/badges/main/coverage.svg)](https://gitlab.com/qtb-hhu/modelbase-software/-/commits/main)\n[![Documentation](https://img.shields.io/badge/Documentation-Gitlab-success)](https://qtb-hhu.gitlab.io/modelbase-software/)\n[![PyPi](https://img.shields.io/pypi/v/modelbase)](https://pypi.org/project/modelbase/)\n[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)\n[![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/)\n[![security: bandit](https://img.shields.io/badge/security-bandit-yellow.svg)](https://github.com/PyCQA/bandit)\n[![Downloads](https://pepy.tech/badge/modelbase)](https://pepy.tech/project/modelbase)\n\n\nmodelbase is a python package to help you build and analyze dynamic mathematical models of biological systems. It has originally been designed for the simulation of metabolic systems, but can be used for virtually any processes, in which some substances get converted into others.\n\nmodelbase incorporates an easy construction method to define 'reactions'. A rate law and the stoichiometry need to be specified, and the system of differential equations is assembled automatically.\n\nmodelbase allows 'algebraic modules', which are useful to implement rapid equilibrium or quasi steady-state approximations. In the simplest instance, they allow easy incorporation of conserved quantities.\n\nmodelbase also allows a simple construction of isotope-specific models. This class contains a constructor method that automatically construct all isotope specific versions of a particular reaction. Very cool - check it out!\n\n## Release notes\n\nVersions 1.0 and 0.4.0 introduced changes not compatible with the previous official\nrelease 0.2.5. API changes are summarised in [this notebook](docs/source/api-changes.ipynb)\n\n[Version 0.4.5](https://gitlab.com/qtb-hhu/modelbase-software/-/tags/0.4.5) was the prior stable version\n\n[Version 0.2.5](https://gitlab.com/qtb-hhu/modelbase-software/-/tags/initial-release)\nis the official release for the submission of the\nmansucript \"Building mathematical models of biological systems\nwith modelbase, a Python package for semi-automatic ODE assembly\nand construction of isotope-specific models\" to the Journal of Open\nResearch Software.\n\nSee changelog.md for details on changes of earlier versions.\n\n## Installation\n\nIf you quickly want to test out modelbase, or do not require assimulo support, install modelbase via\n\n```bash\npip install modelbase\n```\n\nTo enable assimulo support, the easiest way is to install modelbase via conda. 
We also recommend using the conda-forge channels.\n\n```bash\n# recommended to avoid package clashes\nconda config --add channels conda-forge\n\n# Create a new environment (not necessary, but recommended)\nconda create -n mb39 python=3.9\nconda install -c conda-forge modelbase\n```\n\n## License\n\n[GPL 3](https://gitlab.com/qtb-hhu/modelbase-software/blob/main/LICENSE)\n\n## Documentation\n\nThe official documentation is hosted [here on gitlab](https://qtb-hhu.gitlab.io/modelbase-software/).\n\n## Issues and support\n\nIf you experience issues using the software please contact us through our [issues](https://gitlab.com/qtb-hhu/modelbase-software/issues) page.\n\n## Contributing to modelbase\n\nAll contributions, bug reports, bug fixes, documentation improvements, enhancements and ideas are welcome. See our [contribution guide](https://gitlab.com/qtb-hhu/modelbase-software/blob/main/CONTRIBUTING.md) for more information.\n\n## How to cite\n\nIf you use this software in your scientific work, please cite [this article](https://rdcu.be/ckOSa):\n\nvan Aalst, M., Ebenhöh, O. \u0026 Matuszyńska, A. Constructing and analysing dynamic models with modelbase v1.2.3: a software update. BMC Bioinformatics 22, 203 (2021)\n\n- [doi](https://doi.org/10.1186/s12859-021-04122-7)\n- [bibtex file](https://gitlab.com/qtb-hhu/modelbase-software/blob/main/citation.bibtex)\n","description_content_type":"text/markdown","author":"Marvin van Aalst","author_email":"[email protected]","maintainer":"Dan Howe","maintainer_email":"[email protected]","license":"GPL-3.0-or-later","keywords":"modelling, ode, metabolic","classifiers":["Development Status :: 5 - Production/Stable","Environment :: Console","Intended Audience :: Developers","Intended Audience :: Science/Research","License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)","Operating System :: MacOS","Operating System :: Microsoft :: Windows","Operating System :: OS Independent","Operating System :: POSIX","Operating System :: Unix","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering","Topic :: Software Development"],"platform":[],"home_page":"https://gitlab.com/qtb-hhu/modelbase-software","requires_python":"\u003c3.13,\u003e=3.9","requires":[],"provides":[],"obsoletes":[],"requires_dist":["black\u003e=24.4","ipywidgets\u003e=8.1","matplotlib\u003e=3.9","numpy\u003e=2.0","pandas\u003e=2.2","python-libsbml\u003e=5.20","scipy\u003e=1.13","sympy\u003e=1.12","tqdm\u003e=4.66","typing-extensions\u003e=4.12"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Documentation","Repository"],"uploaded_via":"poetry/1.2.2 CPython/3.10.6 Linux/5.15.154+","upload_time":"2024-07-25 07:38:27.821787 UTC","filename":"modelbase-1.56.3-py3-none-any.whl","size":"103206","path":"1f/cb/c673ff79303711ce2912998187f5b032c89458949ef5a4bd274fdd8d77ee/modelbase-1.56.3-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"11d9a6d7b4924bc04c5c93fdc93c57b4","sha256_digest":"18d70758b258099f01c1459e26452d508c4cebbe6571158e60b8d27b21ba6ecc","blake2_256_digest":"1fcbc673ff79303711ce2912998187f5b032c89458949ef5a4bd274fdd8d77ee","license_files":[]}
- {"name":"nyx-client","version":"0.2.0","summary":"Nyx Client offers a simple API for interacting with data in your Nyx network.","description":"\u003cdiv align=\"center\"\u003e\n\n# Nyx Client SDK\n\n[![Version](https://img.shields.io/pypi/v/nyx-client)](https://pypi.org/project/nyx-client)\n[![License](https://img.shields.io/badge/License-Apache%202.0-yellow.svg)](https://github.com/Iotic-Labs/nyx-sdk/blob/main/LICENSE)\n[![Build](https://github.com/Iotic-Labs/nyx-sdk/actions/workflows/build.yaml/badge.svg?branch=main)](https://github.com/Iotic-Labs/nyx-sdk/actions/workflows/build.yaml)\n[![Read The Docs](https://readthedocs.org/projects/nyx-client/badge/?version=stable)](https://nyx-client.readthedocs.io/en/stable)\n[![GitHub Repo stars](https://img.shields.io/github/stars/Iotic-Labs/nyx-sdk)](https://github.com/Iotic-Labs/nyx-sdk)\n[![Discord](https://img.shields.io/discord/1285252646554304555)](https://discord.gg/zS8pVHjqSf)\n\n\n🌟 **Nyx Client SDK** offers a simple API for interacting with data in your Nyx network. Subscribe, search and download data from across your federated network with\nease, or if you're a publisher, easily created, update, or delete your existing data products programmatically!\n\nIf you're looking for an out of the box experience, try installing [nyx-extras](https://github.com/Iotic-Labs/nyx-sdk/tree/main/nyx_extras), to seamlessly and quickly\nbuild powerful AI applications, to interact with data from your Nyx network!\n\n[![https://iotics.com](https://img.shields.io/badge/Powered%20by-Iotics-blue)](https://iotics.com)\n\u003c/div\u003e\n\n## 🚧 Status\n\nThe Nyx ecosystem is at an early stage of its development, please give us feedback through the [Github issues](https://github.com/Iotic-Labs/nyx-sdk/issues).\n\n## What is the Nyx Client SDK\n\nThe Nyx Client SDK is a Python library that provides an API for easy interaction with the Nyx Platform. It enables end users to seamlessly connect their data to the Nyx ecosystem, search for data, subscribe to it, and consume it. With this SDK, users can ultimately build powerful AI applications.\n\nSeveral examples of SDK usage in an AI context are available:\n\n- [RAG demo examples](https://github.com/Iotic-Labs/nyx-sdk/tree/main/examples)\n- `https://[nyx_host]/try-me-now`\n\nSee also [What is Nyx](https://github.com/Iotic-Labs/nyx-sdk?tab=readme-ov-file#-what-is-nyx)\n\n# 🔥 Quick Start\n\n## Installation\n\nThe Nyx Client SDK is available on [PyPI](https://pypi.org/project/nyx-client/) and can be installed via `pip` running the following command.\n\n`pip install nyx-client`\n\n## First time set up\n\nOnce the library is installed you can run `nyx-client init` to generate the .env file. 
You'll be asked to provide your Nyx username, password and Nyx endpoint.\n\n\u003cdetails\u003e\n\u003csummary\u003eExample output\u003c/summary\u003e\n\n```shell\nNYX_URL=\u003cENTER URL\u003e\nNYX_EMAIL=\u003cENTER EMAIL\u003e\nNYX_PASSWORD=\u003cENTER PASSWORD\u003e\n```\n\u003c/details\u003e\n\n\n## As a data producer\n\n### I want to connect my Data\n\n```python\nfrom nyx_client import NyxClient\n\nclient = NyxClient()\nclient.create_data(\n name=\"MyData1\",\n title=\"My Data #1\",\n description=\"The description of the data #1\",\n size=1080,\n genre=\"ai\",\n categories=[\"cat1\", \"cat2\", \"cat3\"],\n download_url=\"http://storage/dataset1.csv\",\n content_type=\"text/csv\",\n lang=\"fr\",\n preview=\"col1, col2, col3\\naaa, bbb, ccc\",\n license_url=\"https://opensource.org/licenses/MIT\",\n)\n```\n\n### I want to delete/disconnect my Data\n\n```python\nfrom nyx_client import NyxClient\n\nclient = NyxClient()\nclient.delete_data_by_name(name=\"MyData1\")\n```\n\n## As an application builder\n\n### I want to subscribe to some data\n\n```python\nfrom nyx_client import NyxClient\n\nclient = NyxClient()\nfound_data = client.get_data(categories[genre=\"ai\", categories=[\"cat1\", \"cat\"], content_type=\"text/csv\"])\nfor data in found_data:\n client.subscribe(data)\n```\n\n### I want to consume the data\n\n```python\nfrom nyx_client import NyxClient\n\nclient = NyxClient()\nsubscribed_data = client.my_subscriptions()\nfor data in subscribed_data:\n print(f\"Downloading data {data.name}\")\n content = data.as_string() # NOTE: If this is a binary file, use as_bytes() instead.\n```\n\n## 👉 Gotchas\n\n- If the Data has been deleted/disconnected from Nyx, the SDK will no longer be able to access it. Ensure that the data is still available.\n\nThe Nyx ecosystem is at an early stage of its development, please give us feedback through [Github issues](https://github.com/Iotic-Labs/nyx-sdk/issues).\n\n- If you get the SSL error\n`httpcore.ConnectError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate`\n\nthen it may be that you need to set some environment variables like this before running your script:\n```shell\nCERT_PATH=$(python -m certifi)\nexport SSL_CERT_FILE=${CERT_PATH}\nexport REQUESTS_CA_BUNDLE=${CERT_PATH}\n```\n\n## 🐞 Troubleshooting\n\nIf you encounter any issues, ensure that:\n\n- Your virtual environment is activated.\n- All required dependencies are installed.\n- Environment variables are set correctly.\n- If an issue persists, check the Issues section on github\n\nFor further assistance:\n- Refer to the [project documentation](https://nyx-client.readthedocs.io/en/stable)\n - 💡 If you have cloned the Git repo, you can run `make docs` and then view `docs/index.html`.\n- [Raise an issue](https://github.com/Iotic-Labs/nyx-sdk/issues) on GitHub\n- [Chat with us](https://discord.gg/zS8pVHjqSf) on Discord [![Discord](https://img.shields.io/discord/1285252646554304555)](https://discord.gg/zS8pVHjqSf)\n\n## 🤝 Contributing\n\nWe welcome contributions! 
Please see our [CONTRIBUTING.md](https://github.com/Iotic-Labs/nyx-sdk/blob/main/CONTRIBUTING.md) for guidelines.\n\n## ⚖️ Terms and conditions\n\n[https://www.get-nyx.io/terms](https://www.get-nyx.io/terms)\n\n","description_content_type":"text/markdown","author":"Iotics","author_email":"[email protected]","keywords":"client, data, sdk, ai, exchange, nyx, rag, gen-ai, iotics","classifiers":["Development Status :: 4 - Beta","Intended Audience :: Developers","License :: OSI Approved :: Apache Software License","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Topic :: Internet","Topic :: Internet :: WWW/HTTP","Topic :: Scientific/Engineering","Topic :: Scientific/Engineering :: Artificial Intelligence","Topic :: Software Development :: Libraries","Topic :: Software Development :: Libraries :: Python Modules"],"platform":[],"home_page":"https://www.get-nyx.io","requires_python":"\u003c4.0,\u003e=3.10","requires":[],"provides":[],"obsoletes":[],"requires_dist":["click\u003c9.0.0,\u003e=8.1.7","python-dotenv\u003c2.0.0,\u003e=1.0.1","requests-toolbelt\u003c2.0.0,\u003e=1.0.0"],"provides_dist":[],"obsoletes_dist":[],"requires_external":[],"project_urls":["Changes","Chat","Documentation","Issue tracker","Repository"],"uploaded_via":"twine/5.1.1 CPython/3.12.7","upload_time":"2024-10-24 13:59:18.650889 UTC","filename":"nyx_client-0.2.0-py3-none-any.whl","size":"16954","path":"06/df/b0361178b10e10837ef40fe2a221136b48a7fdb44777b58f1e0459437e4b/nyx_client-0.2.0-py3-none-any.whl","python_version":"py3","packagetype":"bdist_wheel","has_signature":false,"md5_digest":"6c2ea8b5c98ea2b00f5033e02773198e","sha256_digest":"14c5fd5706b776eb06f3eb4a4d4c2774208883f9431442182f16250269aff5e4","blake2_256_digest":"06dfb0361178b10e10837ef40fe2a221136b48a7fdb44777b58f1e0459437e4b","license_files":[]}
 
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1149ee0d39926a3920cdc6a19b41bb45056b07f8f56d103a52ccd13f1c8f0cff
+ size 17142197
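The three `+` lines above are a Git LFS pointer: after this commit the repository keeps only the pointer (spec version, `oid sha256:…` digest, and byte `size`) while the ~17 MB JSON payload is stored in LFS. A minimal sketch, assuming a local checkout where the pointer text is present as shown (the file path and helper name below are illustrative, not part of this commit), of splitting such a pointer into its fields:

```python
# Illustrative only: parse a Git LFS pointer file (like the one added above)
# into its "key value" fields. Assumes the named file contains pointer text,
# not the smudged JSON payload.
from pathlib import Path


def parse_lfs_pointer(text: str) -> dict:
    """Split each 'key value' line of a Git LFS pointer into a dict entry."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields


if __name__ == "__main__":
    pointer_text = Path("pypi-packages-metadata-000000000562.json").read_text()
    info = parse_lfs_pointer(pointer_text)
    print(info.get("version"))                 # e.g. https://git-lfs.github.com/spec/v1
    print(info.get("oid"), info.get("size"))   # sha256:<digest>, size in bytes
```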
 
pypi-packages-metadata-000000000591.json CHANGED
 
pypi-packages-metadata-000000000594.json CHANGED
 
pypi-packages-metadata-000000000598.json CHANGED
 
pypi-packages-metadata-000000000600.json CHANGED
 
pypi-packages-metadata-000000000603.json CHANGED
 
pypi-packages-metadata-000000000606.json CHANGED
 
pypi-packages-metadata-000000000608.json CHANGED
 
pypi-packages-metadata-000000000617.json CHANGED
 
pypi-packages-metadata-000000000626.json CHANGED
 
pypi-packages-metadata-000000000627.json CHANGED
 
pypi-packages-metadata-000000000629.json CHANGED
 
pypi-packages-metadata-000000000631.json CHANGED
 
pypi-packages-metadata-000000000632.json CHANGED
 
pypi-packages-metadata-000000000633.json CHANGED
 
pypi-packages-metadata-000000000646.json CHANGED