zfp functionality and servers
Some checks failed
Build Sphinx Docs Set / Build Docs (pull_request) Failing after 1s
Build Project / Build Project (3.10) (pull_request) Successful in 57s
Build Project / Build Project (3.11) (pull_request) Successful in 1m7s
Build Project / Build Project (3.12) (pull_request) Successful in 56s
Test with tox / Test with tox (3.12) (pull_request) Failing after 5m13s
Test with tox / Test with tox (3.11) (pull_request) Failing after 5m48s
Test with tox / Test with tox (3.10) (pull_request) Failing after 8m46s
Some checks failed
Build Sphinx Docs Set / Build Docs (pull_request) Failing after 1s
Build Project / Build Project (3.10) (pull_request) Successful in 57s
Build Project / Build Project (3.11) (pull_request) Successful in 1m7s
Build Project / Build Project (3.12) (pull_request) Successful in 56s
Test with tox / Test with tox (3.12) (pull_request) Failing after 5m13s
Test with tox / Test with tox (3.11) (pull_request) Failing after 5m48s
Test with tox / Test with tox (3.10) (pull_request) Failing after 8m46s
This commit is contained in:
parent
7335dc4c52
commit
9a960e2f29
20
CHANGELOG.md
Normal file
20
CHANGELOG.md
Normal file
|
|
@ -0,0 +1,20 @@
|
||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) and [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
---
|
||||||
|
## [0.1.1] - 2026-03-20
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Campaign orchestration** — new `orchestration` module that manages the full lifecycle of an RF data collection campaign: SDR capture, automatic labeling, QA checks, and dataset packaging.
|
||||||
|
- **HTTP inference server** — `ria-server` command starts a REST API server for deploying campaigns and controlling live inference from external systems such as the RIA Hub platform.
|
||||||
|
- **Campaign CLI** — `ria campaign` commands for starting, monitoring, and managing campaigns from the terminal.
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- **Visualization layout** — recording and dataset views have been reformatted with improved sizing, repositioned titles, and updated Qoherent branding.
|
||||||
|
|
||||||
|
---
|
||||||
645
poetry.lock
generated
645
poetry.lock
generated
|
|
@ -38,14 +38,14 @@ files = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anyio"
|
name = "anyio"
|
||||||
version = "4.12.1"
|
version = "4.13.0"
|
||||||
description = "High-level concurrency and networking framework on top of asyncio or Trio"
|
description = "High-level concurrency and networking framework on top of asyncio or Trio"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.9"
|
python-versions = ">=3.10"
|
||||||
groups = ["docs", "server", "test"]
|
groups = ["docs", "server", "test"]
|
||||||
files = [
|
files = [
|
||||||
{file = "anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c"},
|
{file = "anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708"},
|
||||||
{file = "anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703"},
|
{file = "anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
|
|
@ -54,7 +54,7 @@ idna = ">=2.8"
|
||||||
typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
|
typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
trio = ["trio (>=0.31.0) ; python_version < \"3.10\"", "trio (>=0.32.0) ; python_version >= \"3.10\""]
|
trio = ["trio (>=0.32.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "astroid"
|
name = "astroid"
|
||||||
|
|
@ -73,14 +73,14 @@ typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""}
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "attrs"
|
name = "attrs"
|
||||||
version = "25.4.0"
|
version = "26.1.0"
|
||||||
description = "Classes Without Boilerplate"
|
description = "Classes Without Boilerplate"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.9"
|
python-versions = ">=3.9"
|
||||||
groups = ["main"]
|
groups = ["main"]
|
||||||
files = [
|
files = [
|
||||||
{file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"},
|
{file = "attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309"},
|
||||||
{file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"},
|
{file = "attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -163,7 +163,7 @@ version = "2026.2.25"
|
||||||
description = "Python package for providing Mozilla's CA Bundle."
|
description = "Python package for providing Mozilla's CA Bundle."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
groups = ["docs"]
|
groups = ["agent", "docs", "test"]
|
||||||
files = [
|
files = [
|
||||||
{file = "certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa"},
|
{file = "certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa"},
|
||||||
{file = "certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7"},
|
{file = "certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7"},
|
||||||
|
|
@ -269,125 +269,141 @@ pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "charset-normalizer"
|
name = "charset-normalizer"
|
||||||
version = "3.4.5"
|
version = "3.4.6"
|
||||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
groups = ["docs"]
|
groups = ["agent", "docs"]
|
||||||
files = [
|
files = [
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4167a621a9a1a986c73777dbc15d4b5eac8ac5c10393374109a343d4013ec765"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2e1d8ca8611099001949d1cdfaefc510cf0f212484fe7c565f735b68c78c3c95"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f64c6bf8f32f9133b668c7f7a7cbdbc453412bc95ecdbd157f3b1e377a92990"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e25369dc110d58ddf29b949377a93e0716d72a24f62bad72b2b39f155949c1fd"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:568e3c34b58422075a1b49575a6abc616d9751b4d61b23f712e12ebb78fe47b2"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:259695e2ccc253feb2a016303543d691825e920917e31f894ca1a687982b1de4"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:036c079aa08a6a592b82487f97c60b439428320ed1b2ea0b3912e99d30c77765"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dda86aba335c902b6149a02a55b38e96287157e609200811837678214ba2b1db"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:340810d34ef83af92148e96e3e44cb2d3f910d2bf95e5618a5c467d9f102231d"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51fb3c322c81d20567019778cb5a4a6f2dc1c200b886bc0d636238e364848c89"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:cd2d0f0ec9aa977a27731a3209ebbcacebebaf41f902bd453a928bfd281cf7f8"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:4482481cb0572180b6fd976a4d5c72a30263e98564da68b86ec91f0fe35e8565"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b362bcd27819f9c07cbf23db4e0e8cd4b44c5ecd900c2ff907b2b92274a7412"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:39f5068d35621da2881271e5c3205125cc456f54e9030d3f723288c873a71bf9"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:77be992288f720306ab4108fe5c74797de327f3248368dfc7e1a916d6ed9e5a2"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8bea55c4eef25b0b19a0337dc4e3f9a15b00d569c77211fa8cde38684f234fb7"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:8b78d8a609a4b82c273257ee9d631ded7fac0d875bdcdccc109f3ee8328cfcb1"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f0cdaecd4c953bfae0b6bb64910aaaca5a424ad9c72d85cb88417bb9814f7550"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ba20bdf69bd127f66d0174d6f2a93e69045e0b4036dc1ca78e091bcc765830c4"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:150b8ce8e830eb7ccb029ec9ca36022f756986aaaa7956aad6d9ec90089338c0"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:76a9d0de4d0eab387822e7b35d8f89367dd237c72e82ab42b9f7bf5e15ada00f"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:e68c14b04827dd76dcbd1aeea9e604e3e4b78322d8faf2f8132c7138efa340a8"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8fff79bf5978c693c9b1a4d71e4a94fddfb5fe744eb062a318e15f4a2f63a550"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3778fd7d7cd04ae8f54651f4a7a0bd6e39a0cf20f801720a4c21d80e9b7ad6b0"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c7e84e0c0005e3bdc1a9211cd4e62c78ba80bc37b2365ef4410cd2007a9047f2"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dad6e0f2e481fffdcf776d10ebee25e0ef89f16d691f1e5dee4b586375fdc64b"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-win32.whl", hash = "sha256:58ad8270cfa5d4bef1bc85bd387217e14ff154d6630e976c6f56f9a040757475"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-win32.whl", hash = "sha256:74a2e659c7ecbc73562e2a15e05039f1e22c75b7c7618b4b574a3ea9118d1557"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:02a9d1b01c1e12c27883b0c9349e0bcd9ae92e727ff1a277207e1a262b1cbf05"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-win_amd64.whl", hash = "sha256:aa9cccf4a44b9b62d8ba8b4dd06c649ba683e4bf04eea606d2e94cfc2d6ff4d6"},
|
||||||
{file = "charset_normalizer-3.4.5-cp310-cp310-win_arm64.whl", hash = "sha256:039215608ac7b358c4da0191d10fc76868567fbf276d54c14721bdedeb6de064"},
|
{file = "charset_normalizer-3.4.6-cp310-cp310-win_arm64.whl", hash = "sha256:e985a16ff513596f217cee86c21371b8cd011c0f6f056d0920aa2d926c544058"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:610f72c0ee565dfb8ae1241b666119582fdbfe7c0975c175be719f940e110694"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:82060f995ab5003a2d6e0f4ad29065b7672b6593c8c63559beefe5b443242c3e"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60d68e820af339df4ae8358c7a2e7596badeb61e544438e489035f9fbf3246a5"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60c74963d8350241a79cb8feea80e54d518f72c26db618862a8f53e5023deaf9"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b473fc8dca1c3ad8559985794815f06ca3fc71942c969129070f2c3cdf7281"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6e4333fb15c83f7d1482a76d45a0818897b3d33f00efd215528ff7c51b8e35d"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d4eb8ac7469b2a5d64b5b8c04f84d8bf3ad340f4514b98523805cbf46e3b3923"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bc72863f4d9aba2e8fd9085e63548a324ba706d2ea2c83b260da08a59b9482de"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bcb3227c3d9aaf73eaaab1db7ccd80a8995c509ee9941e2aae060ca6e4e5d81"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9cc4fc6c196d6a8b76629a70ddfcd4635a6898756e2d9cac5565cf0654605d73"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:75ee9c1cce2911581a70a3c0919d8bccf5b1cbc9b0e5171400ec736b4b569497"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:0c173ce3a681f309f31b87125fecec7a5d1347261ea11ebbb856fa6006b23c8c"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d1401945cb77787dbd3af2446ff2d75912327c4c3a1526ab7955ecf8600687c"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c907cdc8109f6c619e6254212e794d6548373cc40e1ec75e6e3823d9135d29cc"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a45e504f5e1be0bd385935a8e1507c442349ca36f511a47057a71c9d1d6ea9e"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:404a1e552cf5b675a87f0651f8b79f5f1e6fd100ee88dc612f89aa16abd4486f"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e09f671a54ce70b79a1fc1dc6da3072b7ef7251fadb894ed92d9aa8218465a5f"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e3c701e954abf6fc03a49f7c579cc80c2c6cc52525340ca3186c41d3f33482ef"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d01de5e768328646e6a3fa9e562706f8f6641708c115c62588aef2b941a4f88e"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7a6967aaf043bceabab5412ed6bd6bd26603dae84d5cb75bf8d9a74a4959d398"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:131716d6786ad5e3dc542f5cc6f397ba3339dc0fb87f87ac30e550e8987756af"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5feb91325bbceade6afab43eb3b508c63ee53579fe896c77137ded51c6b6958e"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a374cc0b88aa710e8865dc1bd6edb3743c59f27830f0293ab101e4cf3ce9f85"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f820f24b09e3e779fe84c3c456cb4108a7aa639b0d1f02c28046e11bfcd088ed"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d31f0d1671e1534e395f9eb84a68e0fb670e1edb1fe819a9d7f564ae3bc4e53f"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b35b200d6a71b9839a46b9b7fff66b6638bb52fc9658aa58796b0326595d3021"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-win32.whl", hash = "sha256:cace89841c0599d736d3d74a27bc5821288bb47c5441923277afc6059d7fbcb4"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-win32.whl", hash = "sha256:9ca4c0b502ab399ef89248a2c84c54954f77a070f28e546a85e91da627d1301e"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:f8102ae93c0bc863b1d41ea0f4499c20a83229f52ed870850892df555187154a"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-win_amd64.whl", hash = "sha256:a9e68c9d88823b274cf1e72f28cb5dc89c990edf430b0bfd3e2fb0785bfeabf4"},
|
||||||
{file = "charset_normalizer-3.4.5-cp311-cp311-win_arm64.whl", hash = "sha256:ed98364e1c262cf5f9363c3eca8c2df37024f52a8fa1180a3610014f26eac51c"},
|
{file = "charset_normalizer-3.4.6-cp311-cp311-win_arm64.whl", hash = "sha256:97d0235baafca5f2b09cf332cc275f021e694e8362c6bb9c96fc9a0eb74fc316"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed97c282ee4f994ef814042423a529df9497e3c666dca19be1d4cd1129dc7ade"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ef7fedc7a6ecbe99969cd09632516738a97eeb8bd7258bf8a0f23114c057dab"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0294916d6ccf2d069727d65973c3a1ca477d68708db25fd758dd28b0827cff54"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4ea868bc28109052790eb2b52a9ab33f3aa7adc02f96673526ff47419490e21"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dc57a0baa3eeedd99fafaef7511b5a6ef4581494e8168ee086031744e2679467"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:836ab36280f21fc1a03c99cd05c6b7af70d2697e374c7af0b61ed271401a72a2"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ed1a9a204f317ef879b32f9af507d47e49cd5e7f8e8d5d96358c98373314fc60"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f1ce721c8a7dfec21fcbdfe04e8f68174183cf4e8188e0645e92aa23985c57ff"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad83b8f9379176c841f8865884f3514d905bcd2a9a3b210eaa446e7d2223e4d"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e28d62a8fc7a1fa411c43bd65e346f3bce9716dc51b897fbe930c5987b402d5"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:a118e2e0b5ae6b0120d5efa5f866e58f2bb826067a646431da4d6a2bdae7950e"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:530d548084c4a9f7a16ed4a294d459b4f229db50df689bfe92027452452943a0"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:754f96058e61a5e22e91483f823e07df16416ce76afa4ebf306f8e1d1296d43f"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:30f445ae60aad5e1f8bdbb3108e39f6fbc09f4ea16c815c66578878325f8f15a"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0c300cefd9b0970381a46394902cd18eaf2aa00163f999590ace991989dcd0fc"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ac2393c73378fea4e52aa56285a3d64be50f1a12395afef9cce47772f60334c2"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c108f8619e504140569ee7de3f97d234f0fbae338a7f9f360455071ef9855a95"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:90ca27cd8da8118b18a52d5f547859cc1f8354a00cd1e8e5120df3e30d6279e5"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d1028de43596a315e2720a9849ee79007ab742c06ad8b45a50db8cdb7ed4a82a"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e5a94886bedca0f9b78fecd6afb6629142fd2605aa70a125d49f4edc6037ee6"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:19092dde50335accf365cce21998a1c6dd8eafd42c7b226eb54b2747cdce2fac"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:695f5c2823691a25f17bc5d5ffe79fa90972cc34b002ac6c843bb8a1720e950d"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4354e401eb6dab9aed3c7b4030514328a6c748d05e1c3e19175008ca7de84fb1"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:231d4da14bcd9301310faf492051bee27df11f2bc7549bc0bb41fef11b82daa2"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a68766a3c58fde7f9aaa22b3786276f62ab2f594efb02d0a1421b6282e852e98"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a056d1ad2633548ca18ffa2f85c202cfb48b68615129143915b8dc72a806a923"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-win32.whl", hash = "sha256:1827734a5b308b65ac54e86a618de66f935a4f63a8a462ff1e19a6788d6c2262"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-win32.whl", hash = "sha256:c2274ca724536f173122f36c98ce188fd24ce3dad886ec2b7af859518ce008a4"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:728c6a963dfab66ef865f49286e45239384249672cd598576765acc2a640a636"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-win_amd64.whl", hash = "sha256:c8ae56368f8cc97c7e40a7ee18e1cedaf8e780cd8bc5ed5ac8b81f238614facb"},
|
||||||
{file = "charset_normalizer-3.4.5-cp312-cp312-win_arm64.whl", hash = "sha256:75dfd1afe0b1647449e852f4fb428195a7ed0588947218f7ba929f6538487f02"},
|
{file = "charset_normalizer-3.4.6-cp312-cp312-win_arm64.whl", hash = "sha256:899d28f422116b08be5118ef350c292b36fc15ec2daeb9ea987c89281c7bb5c4"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ac59c15e3f1465f722607800c68713f9fbc2f672b9eb649fe831da4019ae9b23"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:11afb56037cbc4b1555a34dd69151e8e069bee82e613a73bef6e714ce733585f"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:165c7b21d19365464e8f70e5ce5e12524c58b48c78c1f5a57524603c1ab003f8"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:423fb7e748a08f854a08a222b983f4df1912b1daedce51a72bd24fe8f26a1843"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:28269983f25a4da0425743d0d257a2d6921ea7d9b83599d4039486ec5b9f911d"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d73beaac5e90173ac3deb9928a74763a6d230f494e4bfb422c217a0ad8e629bf"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d27ce22ec453564770d29d03a9506d449efbb9fa13c00842262b2f6801c48cce"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d60377dce4511655582e300dc1e5a5f24ba0cb229005a1d5c8d0cb72bb758ab8"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0625665e4ebdddb553ab185de5db7054393af8879fb0c87bd5690d14379d6819"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:530e8cebeea0d76bdcf93357aa5e41336f48c3dc709ac52da2bb167c5b8271d9"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:c23eb3263356d94858655b3e63f85ac5d50970c6e8febcdde7830209139cc37d"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:a26611d9987b230566f24a0a125f17fe0de6a6aff9f25c9f564aaa2721a5fb88"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e6302ca4ae283deb0af68d2fbf467474b8b6aedcd3dab4db187e07f94c109763"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:34315ff4fc374b285ad7f4a0bf7dcbfe769e1b104230d40f49f700d4ab6bbd84"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e51ae7d81c825761d941962450f50d041db028b7278e7b08930b4541b3e45cb9"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ddd609f9e1af8c7bd6e2aca279c931aefecd148a14402d4e368f3171769fd"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:597d10dec876923e5c59e48dbd366e852eacb2b806029491d307daea6b917d7c"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:80d0a5615143c0b3225e5e3ef22c8d5d51f3f72ce0ea6fb84c943546c7b25b6c"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5cffde4032a197bd3b42fd0b9509ec60fb70918d6970e4cc773f20fc9180ca67"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:92734d4d8d187a354a556626c221cd1a892a4e0802ccb2af432a1d85ec012194"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2da4eedcb6338e2321e831a0165759c0c620e37f8cd044a263ff67493be8ffb3"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:613f19aa6e082cf96e17e3ffd89383343d0d589abda756b7764cf78361fd41dc"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:65a126fb4b070d05340a84fc709dd9e7c75d9b063b610ece8a60197a291d0adf"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2b1a63e8224e401cafe7739f77efd3f9e7f5f2026bda4aead8e59afab537784f"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7a80a9242963416bd81f99349d5f3fce1843c303bd404f204918b6d75a75fd6"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6cceb5473417d28edd20c6c984ab6fee6c6267d38d906823ebfe20b03d607dc2"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-win32.whl", hash = "sha256:f1d725b754e967e648046f00c4facc42d414840f5ccc670c5670f59f83693e4f"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-win32.whl", hash = "sha256:d7de2637729c67d67cf87614b566626057e95c303bc0a55ffe391f5205e7003d"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-win_amd64.whl", hash = "sha256:e37bd100d2c5d3ba35db9c7c5ba5a9228cbcffe5c4778dc824b164e5257813d7"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-win_amd64.whl", hash = "sha256:572d7c822caf521f0525ba1bce1a622a0b85cf47ffbdae6c9c19e3b5ac3c4389"},
|
||||||
{file = "charset_normalizer-3.4.5-cp313-cp313-win_arm64.whl", hash = "sha256:93b3b2cc5cf1b8743660ce77a4f45f3f6d1172068207c1defc779a36eea6bb36"},
|
{file = "charset_normalizer-3.4.6-cp313-cp313-win_arm64.whl", hash = "sha256:a4474d924a47185a06411e0064b803c68be044be2d60e50e8bddcc2649957c1f"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8197abe5ca1ffb7d91e78360f915eef5addff270f8a71c1fc5be24a56f3e4873"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:9cc6e6d9e571d2f863fa77700701dae73ed5f78881efc8b3f9a4398772ff53e8"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2aecdb364b8a1802afdc7f9327d55dad5366bc97d8502d0f5854e50712dbc5f"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef5960d965e67165d75b7c7ffc60a83ec5abfc5c11b764ec13ea54fbef8b4421"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a66aa5022bf81ab4b1bebfb009db4fd68e0c6d4307a1ce5ef6a26e5878dfc9e4"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b3694e3f87f8ac7ce279d4355645b3c878d24d1424581b46282f24b92f5a4ae2"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d77f97e515688bd615c1d1f795d540f32542d514242067adcb8ef532504cb9ee"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5d11595abf8dd942a77883a39d81433739b287b6aa71620f15164f8096221b30"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01a1ed54b953303ca7e310fafe0fe347aab348bd81834a0bcd602eb538f89d66"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7bda6eebafd42133efdca535b04ccb338ab29467b3f7bf79569883676fc628db"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:b2d37d78297b39a9eb9eb92c0f6df98c706467282055419df141389b23f93362"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:bbc8c8650c6e51041ad1be191742b8b421d05bbd3410f43fa2a00c8db87678e8"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e71bbb595973622b817c042bd943c3f3667e9c9983ce3d205f973f486fec98a7"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:22c6f0c2fbc31e76c3b8a86fba1a56eda6166e238c29cdd3d14befdb4a4e4815"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cd966c2559f501c6fd69294d082c2934c8dd4719deb32c22961a5ac6db0df1d"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7edbed096e4a4798710ed6bc75dcaa2a21b68b6c356553ac4823c3658d53743a"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d5e52d127045d6ae01a1e821acfad2f3a1866c54d0e837828538fabe8d9d1bd6"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:7f9019c9cb613f084481bd6a100b12e1547cf2efe362d873c2e31e4035a6fa43"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:30a2b1a48478c3428d047ed9690d57c23038dac838a87ad624c85c0a78ebeb39"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:58c948d0d086229efc484fe2f30c2d382c86720f55cd9bc33591774348ad44e0"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d8ed79b8f6372ca4254955005830fd61c1ccdd8c0fac6603e2c145c61dd95db6"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:419a9d91bd238052642a51938af8ac05da5b3343becde08d5cdeab9046df9ee1"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:c5af897b45fa606b12464ccbe0014bbf8c09191e0a66aab6aa9d5cf6e77e0c94"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5273b9f0b5835ff0350c0828faea623c68bfa65b792720c453e22b25cc72930f"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1088345bcc93c58d8d8f3d783eca4a6e7a7752bbff26c3eee7e73c597c191c2e"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:0e901eb1049fdb80f5bd11ed5ea1e498ec423102f7a9b9e4645d5b8204ff2815"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-win32.whl", hash = "sha256:ee57b926940ba00bca7ba7041e665cc956e55ef482f851b9b65acb20d867e7a2"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-win32.whl", hash = "sha256:b4ff1d35e8c5bd078be89349b6f3a845128e685e751b6ea1169cf2160b344c4d"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-win_amd64.whl", hash = "sha256:4481e6da1830c8a1cc0b746b47f603b653dadb690bcd851d039ffaefe70533aa"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-win_amd64.whl", hash = "sha256:74119174722c4349af9708993118581686f343adc1c8c9c007d59be90d077f3f"},
|
||||||
{file = "charset_normalizer-3.4.5-cp314-cp314-win_arm64.whl", hash = "sha256:97ab7787092eb9b50fb47fa04f24c75b768a606af1bcba1957f07f128a7219e4"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314-win_arm64.whl", hash = "sha256:e5bcc1a1ae744e0bb59641171ae53743760130600da8db48cbb6e4918e186e4e"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e22d1059b951e7ae7c20ef6b06afd10fb95e3c41bf3c4fbc874dba113321c193"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:ad8faf8df23f0378c6d527d8b0b15ea4a2e23c89376877c598c4870d1b2c7866"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afca7f78067dd27c2b848f1b234623d26b87529296c6c5652168cc1954f2f3b2"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f5ea69428fa1b49573eef0cc44a1d43bebd45ad0c611eb7d7eac760c7ae771bc"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ec56a2266f32bc06ed3c3e2a8f58417ce02f7e0356edc89786e52db13c593c98"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:06a7e86163334edfc5d20fe104db92fcd666e5a5df0977cb5680a506fe26cc8e"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b970382e4a36bed897c19f310f31d7d13489c11b4f468ddfba42d41cddfb918"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e1f6e2f00a6b8edb562826e4632e26d063ac10307e80f7461f7de3ad8ef3f077"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:573ef5814c4b7c0d59a7710aa920eaaaef383bd71626aa420fba27b5cab92e8d"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b52c68d64c1878818687a473a10547b3292e82b6f6fe483808fb1468e2f52f"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-manylinux_2_31_armv7l.whl", hash = "sha256:50bcbca6603c06a1dcc7b056ed45c37715fb5d2768feb3bcd37d2313c587a5b9"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:7504e9b7dc05f99a9bbb4525c67a2c155073b44d720470a148b34166a69c054e"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1f2da5cbb9becfcd607757a169e38fb82aa5fd86fae6653dea716e7b613fe2cf"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:172985e4ff804a7ad08eebec0a1640ece87ba5041d565fff23c8f99c1f389484"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc1c64934b8faf7584924143eb9db4770bbdb16659626e1a1a4d9efbcb68d947"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4be9f4830ba8741527693848403e2c457c16e499100963ec711b1c6f2049b7c7"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:ae8b03427410731469c4033934cf473426faff3e04b69d2dfb64a4281a3719f8"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:79090741d842f564b1b2827c0b82d846405b744d31e84f18d7a7b41c20e473ff"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:b3e71afc578b98512bfe7bdb822dd6bc57d4b0093b4b6e5487c1e96ad4ace242"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:87725cfb1a4f1f8c2fc9890ae2f42094120f4b44db9360be5d99a4c6b0e03a9e"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:4b8551b6e6531e156db71193771c93bda78ffc4d1e6372517fe58ad3b91e4659"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:fcce033e4021347d80ed9c66dcf1e7b1546319834b74445f561d2e2221de5659"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:65b3c403a5b6b8034b655e7385de4f72b7b244869a22b32d4030b99a60593eca"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:ca0276464d148c72defa8bb4390cce01b4a0e425f3b50d1435aa6d7a18107602"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8ce11cd4d62d11166f2b441e30ace226c19a3899a7cf0796f668fba49a9fb123"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:197c1a244a274bb016dd8b79204850144ef77fe81c5b797dc389327adb552407"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-win32.whl", hash = "sha256:66dee73039277eb35380d1b82cccc69cc82b13a66f9f4a18da32d573acf02b7c"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-win32.whl", hash = "sha256:2a24157fa36980478dd1770b585c0f30d19e18f4fb0c47c13aa568f871718579"},
|
||||||
{file = "charset_normalizer-3.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:d29dd9c016f2078b43d0c357511e87eee5b05108f3dd603423cb389b89813969"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-win_amd64.whl", hash = "sha256:cd5e2801c89992ed8c0a3f0293ae83c159a60d9a5d685005383ef4caca77f2c4"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:259cd1ca995ad525f638e131dbcc2353a586564c038fc548a3fe450a91882139"},
|
{file = "charset_normalizer-3.4.6-cp314-cp314t-win_arm64.whl", hash = "sha256:47955475ac79cc504ef2704b192364e51d0d473ad452caedd0002605f780101c"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a28afb04baa55abf26df544e3e5c6534245d3daa5178bc4a8eeb48202060d0e"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:659a1e1b500fac8f2779dd9e1570464e012f43e580371470b45277a27baa7532"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ff95a9283de8a457e6b12989de3f9f5193430f375d64297d323a615ea52cbdb3"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f61aa92e4aad0be58eb6eb4e0c21acf32cf8065f4b2cae5665da756c4ceef982"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:708c7acde173eedd4bfa4028484426ba689d2103b28588c513b9db2cd5ecde9c"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f50498891691e0864dc3da965f340fada0771f6142a378083dc4608f4ea513e2"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa92ec1102eaff840ccd1021478af176a831f1bccb08e526ce844b7ddda85c22"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bf625105bb9eef28a56a943fec8c8a98aeb80e7d7db99bd3c388137e6eb2d237"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:5fea359734b140d0d6741189fea5478c6091b54ffc69d7ce119e0a05637d8c99"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2bd9d128ef93637a5d7a6af25363cf5dec3fa21cf80e68055aad627f280e8afa"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e545b51da9f9af5c67815ca0eb40676c0f016d0b0381c86f20451e35696c5f95"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-manylinux_2_31_armv7l.whl", hash = "sha256:d08ec48f0a1c48d75d0356cea971921848fb620fdeba805b28f937e90691209f"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:30987f4a8ed169983f93e1be8ffeea5214a779e27ed0b059835c7afe96550ad7"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1ed80ff870ca6de33f4d953fda4d55654b9a2b340ff39ab32fa3adbcd718f264"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:149ec69866c3d6c2fb6f758dbc014ecb09f30b35a5ca90b6a8a2d4e54e18fdfe"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f98059e4fcd3e3e4e2d632b7cf81c2faae96c43c60b569e9c621468082f1d104"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:530beedcec9b6e027e7a4b6ce26eed36678aa39e17da85e6e03d7bd9e8e9d7c9"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:ab30e5e3e706e3063bc6de96b118688cb10396b70bb9864a430f67df98c61ecc"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:14498a429321de554b140013142abe7608f9d8ccc04d7baf2ad60498374aefa2"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:d5f5d1e9def3405f60e3ca8232d56f35c98fb7bf581efcc60051ebf53cb8b611"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2820a98460c83663dd8ec015d9ddfd1e4879f12e06bb7d0500f044fb477d2770"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:461598cd852bfa5a61b09cae2b1c02e2efcd166ee5516e243d540ac24bfa68a7"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:aa2f963b4da26daf46231d9b9e0e2c9408a751f8f0d0f44d2de56d3caf51d294"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:71be7e0e01753a89cf024abf7ecb6bca2c81738ead80d43004d9b5e3f1244e64"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-win32.whl", hash = "sha256:82cc7c2ad42faec8b574351f8bc2a0c049043893853317bd9bb309f5aba6cb5a"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:df01808ee470038c3f8dc4f48620df7225c49c2d6639e38f96e6d6ac6e6f7b0e"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:92263f7eca2f4af326cd20de8d16728d2602f7cfea02e790dcde9d83c365d7cc"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-win32.whl", hash = "sha256:69dd852c2f0ad631b8b60cfbe25a28c0058a894de5abb566619c205ce0550eae"},
|
||||||
{file = "charset_normalizer-3.4.5-cp39-cp39-win_arm64.whl", hash = "sha256:014837af6fabf57121b6254fa8ade10dceabc3528b27b721a64bbc7b8b1d4eb4"},
|
{file = "charset_normalizer-3.4.6-cp38-cp38-win_amd64.whl", hash = "sha256:517ad0e93394ac532745129ceabdf2696b609ec9f87863d337140317ebce1c14"},
|
||||||
{file = "charset_normalizer-3.4.5-py3-none-any.whl", hash = "sha256:9db5e3fcdcee89a78c04dffb3fe33c79f77bd741a624946db2591c81b2fc85b0"},
|
{file = "charset_normalizer-3.4.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:31215157227939b4fb3d740cd23fe27be0439afef67b785a1eb78a3ae69cba9e"},
|
||||||
{file = "charset_normalizer-3.4.5.tar.gz", hash = "sha256:95adae7b6c42a6c5b5b559b1a99149f090a57128155daeea91732c8d970d8644"},
|
{file = "charset_normalizer-3.4.6-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecbbd45615a6885fe3240eb9db73b9e62518b611850fdf8ab08bd56de7ad2b17"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c45a03a4c69820a399f1dda9e1d8fbf3562eda46e7720458180302021b08f778"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e8aeb10fcbe92767f0fa69ad5a72deca50d0dca07fbde97848997d778a50c9fe"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:54fae94be3d75f3e573c9a1b5402dc593de19377013c9a0e4285e3d402dd3a2a"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:2f7fdd9b6e6c529d6a2501a2d36b240109e78a8ceaef5687cfcfa2bbe671d297"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d1d02209e06550bdaef34af58e041ad71b88e624f5d825519da3a3308e22687"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8bc5f0687d796c05b1e28ab0d38a50e6309906ee09375dd3aff6a9c09dd6e8f4"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ee4ec14bc1680d6b0afab9aea2ef27e26d2024f18b24a2d7155a52b60da7e833"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d1a2ee9c1499fc8f86f4521f27a973c914b211ffa87322f4ee33bb35392da2c5"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:48696db7f18afb80a068821504296eb0787d9ce239b91ca15059d1d3eaacf13b"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4f41da960b196ea355357285ad1316a00099f22d0929fe168343b99b254729c9"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:802168e03fba8bbc5ce0d866d589e4b1ca751d06edee69f7f3a19c5a9fe6b597"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-win32.whl", hash = "sha256:8761ac29b6c81574724322a554605608a9960769ea83d2c73e396f3df896ad54"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-win_amd64.whl", hash = "sha256:1cf0a70018692f85172348fe06d3a4b63f94ecb055e13a00c644d368eb82e5b8"},
|
||||||
|
{file = "charset_normalizer-3.4.6-cp39-cp39-win_arm64.whl", hash = "sha256:3516bbb8d42169de9e61b8520cbeeeb716f12f4ecfe3fd30a9919aa16c806ca8"},
|
||||||
|
{file = "charset_normalizer-3.4.6-py3-none-any.whl", hash = "sha256:947cf925bc916d90adba35a64c82aace04fa39b46b52d4630ece166655905a69"},
|
||||||
|
{file = "charset_normalizer-3.4.6.tar.gz", hash = "sha256:1ae6b62897110aa7c79ea2f5dd38d1abca6db663687c0b1ad9aed6f6bae3d9d6"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -666,19 +682,19 @@ test = ["pytest (>=6)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "fastapi"
|
name = "fastapi"
|
||||||
version = "0.135.1"
|
version = "0.135.2"
|
||||||
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
|
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.10"
|
python-versions = ">=3.10"
|
||||||
groups = ["server", "test"]
|
groups = ["server", "test"]
|
||||||
files = [
|
files = [
|
||||||
{file = "fastapi-0.135.1-py3-none-any.whl", hash = "sha256:46e2fc5745924b7c840f71ddd277382af29ce1cdb7d5eab5bf697e3fb9999c9e"},
|
{file = "fastapi-0.135.2-py3-none-any.whl", hash = "sha256:0af0447d541867e8db2a6a25c23a8c4bd80e2394ac5529bd87501bbb9e240ca5"},
|
||||||
{file = "fastapi-0.135.1.tar.gz", hash = "sha256:d04115b508d936d254cea545b7312ecaa58a7b3a0f84952535b4c9afae7668cd"},
|
{file = "fastapi-0.135.2.tar.gz", hash = "sha256:88a832095359755527b7f63bb4c6bc9edb8329a026189eed83d6c1afcf419d56"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
annotated-doc = ">=0.0.2"
|
annotated-doc = ">=0.0.2"
|
||||||
pydantic = ">=2.7.0"
|
pydantic = ">=2.9.0"
|
||||||
starlette = ">=0.46.0"
|
starlette = ">=0.46.0"
|
||||||
typing-extensions = ">=4.8.0"
|
typing-extensions = ">=4.8.0"
|
||||||
typing-inspection = ">=0.4.2"
|
typing-inspection = ">=0.4.2"
|
||||||
|
|
@ -730,62 +746,62 @@ files = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "fonttools"
|
name = "fonttools"
|
||||||
version = "4.62.0"
|
version = "4.62.1"
|
||||||
description = "Tools to manipulate font files"
|
description = "Tools to manipulate font files"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.10"
|
python-versions = ">=3.10"
|
||||||
groups = ["main"]
|
groups = ["main"]
|
||||||
files = [
|
files = [
|
||||||
{file = "fonttools-4.62.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:62b6a3d0028e458e9b59501cf7124a84cd69681c433570e4861aff4fb54a236c"},
|
{file = "fonttools-4.62.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ad5cca75776cd453b1b035b530e943334957ae152a36a88a320e779d61fc980c"},
|
||||||
{file = "fonttools-4.62.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:966557078b55e697f65300b18025c54e872d7908d1899b7314d7c16e64868cb2"},
|
{file = "fonttools-4.62.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b3ae47e8636156a9accff64c02c0924cbebad62854c4a6dbdc110cd5b4b341a"},
|
||||||
{file = "fonttools-4.62.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cf34861145b516cddd19b07ae6f4a61ea1c6326031b960ec9ddce8ee815e888"},
|
{file = "fonttools-4.62.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9b9e288b4da2f64fd6180644221749de651703e8d0c16bd4b719533a3a7d6e3"},
|
||||||
{file = "fonttools-4.62.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e2ff573de2775508c8a366351fb901c4ced5dc6cf2d87dd15c973bedcdd5216"},
|
{file = "fonttools-4.62.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7bca7a1c1faf235ffe25d4f2e555246b4750220b38de8261d94ebc5ce8a23c23"},
|
||||||
{file = "fonttools-4.62.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:55b189a1b3033860a38e4e5bd0626c5aa25c7ce9caee7bc784a8caec7a675401"},
|
{file = "fonttools-4.62.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4e0fcf265ad26e487c56cb12a42dffe7162de708762db951e1b3f755319507d"},
|
||||||
{file = "fonttools-4.62.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:825f98cd14907c74a4d0a3f7db8570886ffce9c6369fed1385020febf919abf6"},
|
{file = "fonttools-4.62.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2d850f66830a27b0d498ee05adb13a3781637b1826982cd7e2b3789ef0cc71ae"},
|
||||||
{file = "fonttools-4.62.0-cp310-cp310-win32.whl", hash = "sha256:c858030560f92a054444c6e46745227bfd3bb4e55383c80d79462cd47289e4b5"},
|
{file = "fonttools-4.62.1-cp310-cp310-win32.whl", hash = "sha256:486f32c8047ccd05652aba17e4a8819a3a9d78570eb8a0e3b4503142947880ed"},
|
||||||
{file = "fonttools-4.62.0-cp310-cp310-win_amd64.whl", hash = "sha256:9bf75eb69330e34ad2a096fac67887102c8537991eb6cac1507fc835bbb70e0a"},
|
{file = "fonttools-4.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:5a648bde915fba9da05ae98856987ca91ba832949a9e2888b48c47ef8b96c5a9"},
|
||||||
{file = "fonttools-4.62.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:196cafef9aeec5258425bd31a4e9a414b2ee0d1557bca184d7923d3d3bcd90f9"},
|
{file = "fonttools-4.62.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:40975849bac44fb0b9253d77420c6d8b523ac4dcdcefeff6e4d706838a5b80f7"},
|
||||||
{file = "fonttools-4.62.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:153afc3012ff8761b1733e8fbe5d98623409774c44ffd88fbcb780e240c11d13"},
|
{file = "fonttools-4.62.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9dde91633f77fa576879a0c76b1d89de373cae751a98ddf0109d54e173b40f14"},
|
||||||
{file = "fonttools-4.62.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13b663fb197334de84db790353d59da2a7288fd14e9be329f5debc63ec0500a5"},
|
{file = "fonttools-4.62.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6acb4109f8bee00fec985c8c7afb02299e35e9c94b57287f3ea542f28bd0b0a7"},
|
||||||
{file = "fonttools-4.62.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:591220d5333264b1df0d3285adbdfe2af4f6a45bbf9ca2b485f97c9f577c49ff"},
|
{file = "fonttools-4.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1c5c25671ce8805e0d080e2ffdeca7f1e86778c5cbfbeae86d7f866d8830517b"},
|
||||||
{file = "fonttools-4.62.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:579f35c121528a50c96bf6fcb6a393e81e7f896d4326bf40e379f1c971603db9"},
|
{file = "fonttools-4.62.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a5d8825e1140f04e6c99bb7d37a9e31c172f3bc208afbe02175339e699c710e1"},
|
||||||
{file = "fonttools-4.62.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:44956b003151d5a289eba6c71fe590d63509267c37e26de1766ba15d9c589582"},
|
{file = "fonttools-4.62.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:268abb1cb221e66c014acc234e872b7870d8b5d4657a83a8f4205094c32d2416"},
|
||||||
{file = "fonttools-4.62.0-cp311-cp311-win32.whl", hash = "sha256:42c7848fa8836ab92c23b1617c407a905642521ff2d7897fe2bf8381530172f1"},
|
{file = "fonttools-4.62.1-cp311-cp311-win32.whl", hash = "sha256:942b03094d7edbb99bdf1ae7e9090898cad7bf9030b3d21f33d7072dbcb51a53"},
|
||||||
{file = "fonttools-4.62.0-cp311-cp311-win_amd64.whl", hash = "sha256:4da779e8f342a32856075ddb193b2a024ad900bc04ecb744014c32409ae871ed"},
|
{file = "fonttools-4.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:e8514f4924375f77084e81467e63238b095abda5107620f49421c368a6017ed2"},
|
||||||
{file = "fonttools-4.62.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:22bde4dc12a9e09b5ced77f3b5053d96cf10c4976c6ac0dee293418ef289d221"},
|
{file = "fonttools-4.62.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:90365821debbd7db678809c7491ca4acd1e0779b9624cdc6ddaf1f31992bf974"},
|
||||||
{file = "fonttools-4.62.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7199c73b326bad892f1cb53ffdd002128bfd58a89b8f662204fbf1daf8d62e85"},
|
{file = "fonttools-4.62.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12859ff0b47dd20f110804c3e0d0970f7b832f561630cd879969011541a464a9"},
|
||||||
{file = "fonttools-4.62.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d732938633681d6e2324e601b79e93f7f72395ec8681f9cdae5a8c08bc167e72"},
|
{file = "fonttools-4.62.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c125ffa00c3d9003cdaaf7f2c79e6e535628093e14b5de1dccb08859b680936"},
|
||||||
{file = "fonttools-4.62.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:31a804c16d76038cc4e3826e07678efb0a02dc4f15396ea8e07088adbfb2578e"},
|
{file = "fonttools-4.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:149f7d84afca659d1a97e39a4778794a2f83bf344c5ee5134e09995086cc2392"},
|
||||||
{file = "fonttools-4.62.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:090e74ac86e68c20150e665ef8e7e0c20cb9f8b395302c9419fa2e4d332c3b51"},
|
{file = "fonttools-4.62.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0aa72c43a601cfa9273bb1ae0518f1acadc01ee181a6fc60cd758d7fdadffc04"},
|
||||||
{file = "fonttools-4.62.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8f086120e8be9e99ca1288aa5ce519833f93fe0ec6ebad2380c1dee18781f0b5"},
|
{file = "fonttools-4.62.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:19177c8d96c7c36359266e571c5173bcee9157b59cfc8cb0153c5673dc5a3a7d"},
|
||||||
{file = "fonttools-4.62.0-cp312-cp312-win32.whl", hash = "sha256:37a73e5e38fd05c637daede6ffed5f3496096be7df6e4a3198d32af038f87527"},
|
{file = "fonttools-4.62.1-cp312-cp312-win32.whl", hash = "sha256:a24decd24d60744ee8b4679d38e88b8303d86772053afc29b19d23bb8207803c"},
|
||||||
{file = "fonttools-4.62.0-cp312-cp312-win_amd64.whl", hash = "sha256:658ab837c878c4d2a652fcbb319547ea41693890e6434cf619e66f79387af3b8"},
|
{file = "fonttools-4.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:9e7863e10b3de72376280b515d35b14f5eeed639d1aa7824f4cf06779ec65e42"},
|
||||||
{file = "fonttools-4.62.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:274c8b8a87e439faf565d3bcd3f9f9e31bca7740755776a4a90a4bfeaa722efa"},
|
{file = "fonttools-4.62.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c22b1014017111c401469e3acc5433e6acf6ebcc6aa9efb538a533c800971c79"},
|
||||||
{file = "fonttools-4.62.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93e27131a5a0ae82aaadcffe309b1bae195f6711689722af026862bede05c07c"},
|
{file = "fonttools-4.62.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:68959f5fc58ed4599b44aad161c2837477d7f35f5f79402d97439974faebfebe"},
|
||||||
{file = "fonttools-4.62.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83c6524c5b93bad9c2939d88e619fedc62e913c19e673f25d5ab74e7a5d074e5"},
|
{file = "fonttools-4.62.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef46db46c9447103b8f3ff91e8ba009d5fe181b1920a83757a5762551e32bb68"},
|
||||||
{file = "fonttools-4.62.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:106aec9226f9498fc5345125ff7200842c01eda273ae038f5049b0916907acee"},
|
{file = "fonttools-4.62.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6706d1cb1d5e6251a97ad3c1b9347505c5615c112e66047abbef0f8545fa30d1"},
|
||||||
{file = "fonttools-4.62.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15d86b96c79013320f13bc1b15f94789edb376c0a2d22fb6088f33637e8dfcbc"},
|
{file = "fonttools-4.62.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2e7abd2b1e11736f58c1de27819e1955a53267c21732e78243fa2fa2e5c1e069"},
|
||||||
{file = "fonttools-4.62.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f16c07e5250d5d71d0f990a59460bc5620c3cc456121f2cfb5b60475699905f"},
|
{file = "fonttools-4.62.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:403d28ce06ebfc547fbcb0cb8b7f7cc2f7a2d3e1a67ba9a34b14632df9e080f9"},
|
||||||
{file = "fonttools-4.62.0-cp313-cp313-win32.whl", hash = "sha256:d31558890f3fa00d4f937d12708f90c7c142c803c23eaeb395a71f987a77ebe3"},
|
{file = "fonttools-4.62.1-cp313-cp313-win32.whl", hash = "sha256:93c316e0f5301b2adbe6a5f658634307c096fd5aae60a5b3412e4f3e1728ab24"},
|
||||||
{file = "fonttools-4.62.0-cp313-cp313-win_amd64.whl", hash = "sha256:6826a5aa53fb6def8a66bf423939745f415546c4e92478a7c531b8b6282b6c3b"},
|
{file = "fonttools-4.62.1-cp313-cp313-win_amd64.whl", hash = "sha256:7aa21ff53e28a9c2157acbc44e5b401149d3c9178107130e82d74ceb500e5056"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:4fa5a9c716e2f75ef34b5a5c2ca0ee4848d795daa7e6792bf30fd4abf8993449"},
|
{file = "fonttools-4.62.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fa1d16210b6b10a826d71bed68dd9ec24a9e218d5a5e2797f37c573e7ec215ca"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:625f5cbeb0b8f4e42343eaeb4bc2786718ddd84760a2f5e55fdd3db049047c00"},
|
{file = "fonttools-4.62.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:aa69d10ed420d8121118e628ad47d86e4caa79ba37f968597b958f6cceab7eca"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6247e58b96b982709cd569a91a2ba935d406dccf17b6aa615afaed37ac3856aa"},
|
{file = "fonttools-4.62.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd13b7999d59c5eb1c2b442eb2d0c427cb517a0b7a1f5798fc5c9e003f5ff782"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:840632ea9c1eab7b7f01c369e408c0721c287dfd7500ab937398430689852fd1"},
|
{file = "fonttools-4.62.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8d337fdd49a79b0d51c4da87bc38169d21c3abbf0c1aa9367eff5c6656fb6dae"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:28a9ea2a7467a816d1bec22658b0cce4443ac60abac3e293bdee78beb74588f3"},
|
{file = "fonttools-4.62.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d241cdc4a67b5431c6d7f115fdf63335222414995e3a1df1a41e1182acd4bcc7"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5ae611294f768d413949fd12693a8cba0e6332fbc1e07aba60121be35eac68d0"},
|
{file = "fonttools-4.62.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c05557a78f8fa514da0f869556eeda40887a8abc77c76ee3f74cf241778afd5a"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314-win32.whl", hash = "sha256:273acb61f316d07570a80ed5ff0a14a23700eedbec0ad968b949abaa4d3f6bb5"},
|
{file = "fonttools-4.62.1-cp314-cp314-win32.whl", hash = "sha256:49a445d2f544ce4a69338694cad575ba97b9a75fff02720da0882d1a73f12800"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314-win_amd64.whl", hash = "sha256:a5f974006d14f735c6c878fc4b117ad031dc93638ddcc450ca69f8fd64d5e104"},
|
{file = "fonttools-4.62.1-cp314-cp314-win_amd64.whl", hash = "sha256:1eecc128c86c552fb963fe846ca4e011b1be053728f798185a1687502f6d398e"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0361a7d41d86937f1f752717c19f719d0fde064d3011038f9f19bdf5fc2f5c95"},
|
{file = "fonttools-4.62.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:1596aeaddf7f78e21e68293c011316a25267b3effdaccaf4d59bc9159d681b82"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:d4108c12773b3c97aa592311557c405d5b4fc03db2b969ed928fcf68e7b3c887"},
|
{file = "fonttools-4.62.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:8f8fca95d3bb3208f59626a4b0ea6e526ee51f5a8ad5d91821c165903e8d9260"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b448075f32708e8fb377fe7687f769a5f51a027172c591ba9a58693631b077a8"},
|
{file = "fonttools-4.62.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee91628c08e76f77b533d65feb3fbe6d9dad699f95be51cf0d022db94089cdc4"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e5f1fa8cc9f1a56a3e33ee6b954d6d9235e6b9d11eb7a6c9dfe2c2f829dc24db"},
|
{file = "fonttools-4.62.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f37df1cac61d906e7b836abe356bc2f34c99d4477467755c216b72aa3dc748b"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f8c8ea812f82db1e884b9cdb663080453e28f0f9a1f5027a5adb59c4cc8d38d1"},
|
{file = "fonttools-4.62.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:92bb00a947e666169c99b43753c4305fc95a890a60ef3aeb2a6963e07902cc87"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:03c6068adfdc67c565d217e92386b1cdd951abd4240d65180cec62fa74ba31b2"},
|
{file = "fonttools-4.62.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:bdfe592802ef939a0e33106ea4a318eeb17822c7ee168c290273cbd5fabd746c"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314t-win32.whl", hash = "sha256:d28d5baacb0017d384df14722a63abe6e0230d8ce642b1615a27d78ffe3bc983"},
|
{file = "fonttools-4.62.1-cp314-cp314t-win32.whl", hash = "sha256:b820fcb92d4655513d8402d5b219f94481c4443d825b4372c75a2072aa4b357a"},
|
||||||
{file = "fonttools-4.62.0-cp314-cp314t-win_amd64.whl", hash = "sha256:3f9e20c4618f1e04190c802acae6dc337cb6db9fa61e492fd97cd5c5a9ff6d07"},
|
{file = "fonttools-4.62.1-cp314-cp314t-win_amd64.whl", hash = "sha256:59b372b4f0e113d3746b88985f1c796e7bf830dd54b28374cd85c2b8acd7583e"},
|
||||||
{file = "fonttools-4.62.0-py3-none-any.whl", hash = "sha256:75064f19a10c50c74b336aa5ebe7b1f89fd0fb5255807bfd4b0c6317098f4af3"},
|
{file = "fonttools-4.62.1-py3-none-any.whl", hash = "sha256:7487782e2113861f4ddcc07c3436450659e3caa5e470b27dc2177cade2d8e7fd"},
|
||||||
{file = "fonttools-4.62.0.tar.gz", hash = "sha256:0dc477c12b8076b4eb9af2e440421b0433ffa9e1dcb39e0640a6c94665ed1098"},
|
{file = "fonttools-4.62.1.tar.gz", hash = "sha256:e54c75fd6041f1122476776880f7c3c3295ffa31962dc6ebe2543c00dca58b5d"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
|
|
@ -874,6 +890,28 @@ files = [
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
numpy = ">=1.21.2"
|
numpy = ">=1.21.2"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "httpcore"
|
||||||
|
version = "1.0.9"
|
||||||
|
description = "A minimal low-level HTTP client."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
groups = ["test"]
|
||||||
|
files = [
|
||||||
|
{file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"},
|
||||||
|
{file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
certifi = "*"
|
||||||
|
h11 = ">=0.16"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
asyncio = ["anyio (>=4.0,<5.0)"]
|
||||||
|
http2 = ["h2 (>=3,<5)"]
|
||||||
|
socks = ["socksio (==1.*)"]
|
||||||
|
trio = ["trio (>=0.22.0,<1.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "httptools"
|
name = "httptools"
|
||||||
version = "0.7.1"
|
version = "0.7.1"
|
||||||
|
|
@ -927,13 +965,38 @@ files = [
|
||||||
{file = "httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9"},
|
{file = "httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "httpx"
|
||||||
|
version = "0.28.1"
|
||||||
|
description = "The next generation HTTP client."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
groups = ["test"]
|
||||||
|
files = [
|
||||||
|
{file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
|
||||||
|
{file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
anyio = "*"
|
||||||
|
certifi = "*"
|
||||||
|
httpcore = "==1.*"
|
||||||
|
idna = "*"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
|
||||||
|
cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
|
||||||
|
http2 = ["h2 (>=3,<5)"]
|
||||||
|
socks = ["socksio (==1.*)"]
|
||||||
|
zstd = ["zstandard (>=0.18.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "idna"
|
name = "idna"
|
||||||
version = "3.11"
|
version = "3.11"
|
||||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.8"
|
python-versions = ">=3.8"
|
||||||
groups = ["docs", "server", "test"]
|
groups = ["agent", "docs", "server", "test"]
|
||||||
files = [
|
files = [
|
||||||
{file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"},
|
{file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"},
|
||||||
{file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"},
|
{file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"},
|
||||||
|
|
@ -1399,14 +1462,14 @@ files = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "narwhals"
|
name = "narwhals"
|
||||||
version = "2.18.0"
|
version = "2.18.1"
|
||||||
description = "Extremely lightweight compatibility layer between dataframe libraries"
|
description = "Extremely lightweight compatibility layer between dataframe libraries"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.9"
|
python-versions = ">=3.9"
|
||||||
groups = ["main"]
|
groups = ["main"]
|
||||||
files = [
|
files = [
|
||||||
{file = "narwhals-2.18.0-py3-none-any.whl", hash = "sha256:68378155ee706ac9c5b25868ef62ecddd62947b6df7801a0a156bc0a615d2d0d"},
|
{file = "narwhals-2.18.1-py3-none-any.whl", hash = "sha256:a0a8bb80205323851338888ba3a12b4f65d352362c8a94be591244faf36504ad"},
|
||||||
{file = "narwhals-2.18.0.tar.gz", hash = "sha256:1de5cee338bc17c338c6278df2c38c0dd4290499fcf70d75e0a51d5f22a6e960"},
|
{file = "narwhals-2.18.1.tar.gz", hash = "sha256:652a1fcc9d432bbf114846688884c215f17eb118aa640b7419295d2f910d2a8b"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
|
|
@ -1476,6 +1539,7 @@ description = "ONNX Runtime is a runtime accelerator for Machine Learning models
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.10"
|
python-versions = ">=3.10"
|
||||||
groups = ["server", "test"]
|
groups = ["server", "test"]
|
||||||
|
markers = "python_version == \"3.10\""
|
||||||
files = [
|
files = [
|
||||||
{file = "onnxruntime-1.24.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3e6456801c66b095c5cd68e690ca25db970ea5202bd0c5b84a2c3ef7731c5a3c"},
|
{file = "onnxruntime-1.24.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3e6456801c66b095c5cd68e690ca25db970ea5202bd0c5b84a2c3ef7731c5a3c"},
|
||||||
{file = "onnxruntime-1.24.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b2ebc54c6d8281dccff78d4b06e47d4cf07535937584ab759448390a70f4978"},
|
{file = "onnxruntime-1.24.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b2ebc54c6d8281dccff78d4b06e47d4cf07535937584ab759448390a70f4978"},
|
||||||
|
|
@ -1510,6 +1574,48 @@ packaging = "*"
|
||||||
protobuf = "*"
|
protobuf = "*"
|
||||||
sympy = "*"
|
sympy = "*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "onnxruntime"
|
||||||
|
version = "1.24.4"
|
||||||
|
description = "ONNX Runtime is a runtime accelerator for Machine Learning models"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.11"
|
||||||
|
groups = ["server", "test"]
|
||||||
|
markers = "python_version >= \"3.11\""
|
||||||
|
files = [
|
||||||
|
{file = "onnxruntime-1.24.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0bdfce8e9a6497cec584aab407b71bf697dac5e1b7b7974adc50bf7533bdb3a2"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:046ff290045a387676941a02a8ae5c3ebec6b4f551ae228711968c4a69d8f6b7"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e54ad52e61d2d4618dcff8fa1480ac66b24ee2eab73331322db1049f11ccf330"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b43b63eb24a2bc8fc77a09be67587a570967a412cccb837b6245ccb546691153"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp311-cp311-win_arm64.whl", hash = "sha256:e26478356dba25631fb3f20112e345f8e8bf62c499bb497e8a559f7d69cf7e7b"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cad1c2b3f455c55678ab2a8caa51fb420c25e6e3cf10f4c23653cdabedc8de78"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1a5c5a544b22f90859c88617ecb30e161ee3349fcc73878854f43d77f00558b5"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d640eb9f3782689b55cfa715094474cd5662f2f137be6a6f847a594b6e9705c"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp312-cp312-win_amd64.whl", hash = "sha256:535b29475ca42b593c45fbb2152fbf1cdf3f287315bf650e6a724a0a1d065cdb"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp312-cp312-win_arm64.whl", hash = "sha256:e6214096e14b7b52e3bee1903dc12dc7ca09cb65e26664668a4620cc5e6f9a90"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e99a48078baaefa2b50fe5836c319499f71f13f76ed32d0211f39109147a49e0"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4aaed1e5e1aaacf2343c838a30a7c3ade78f13eeb16817411f929d04040a13"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e30c972bc02e072911aabb6891453ec73795386c0af2b761b65444b8a4c4745f"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp313-cp313-win_amd64.whl", hash = "sha256:3b6ba8b0181a3aa88edab00eb01424ffc06f42e71095a91186c2249415fcff93"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp313-cp313-win_arm64.whl", hash = "sha256:71d6a5c1821d6e8586a024000ece458db8f2fc0ecd050435d45794827ce81e19"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1700f559c8086d06b2a4d5de51e62cb4ff5e2631822f71a36db8c72383db71ee"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c74e268dc808e61e63784d43f9ddcdaf50a776c2819e8bd1d1b11ef64bf7e36"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:fbff2a248940e3398ae78374c5a839e49a2f39079b488bc64439fa0ec327a3e4"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2b7969e72d8cb53ffc88ab6d49dd5e75c1c663bda7be7eb0ece192f127343d1"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14ed1f197fab812b695a5eaddb536c635e58a2fbbe50a517c78f082cc6ce9177"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp314-cp314-win_amd64.whl", hash = "sha256:311e309f573bf3c12aa5723e23823077f83d5e412a18499d4485c7eb41040858"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f0b910e86b759a4732663ec61fd57ac42ee1b0066f68299de164220b660546d"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa12ddc54c9c4594073abcaa265cd9681e95fb89dae982a6f508a794ca42e661"},
|
||||||
|
{file = "onnxruntime-1.24.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1cc6a518255f012134bc791975a6294806be9a3b20c4a54cca25194c90cf731"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
flatbuffers = "*"
|
||||||
|
numpy = ">=1.21.6"
|
||||||
|
packaging = "*"
|
||||||
|
protobuf = "*"
|
||||||
|
sympy = "*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "packaging"
|
name = "packaging"
|
||||||
version = "26.0"
|
version = "26.0"
|
||||||
|
|
@ -1803,20 +1909,20 @@ testing = ["coverage", "pytest", "pytest-benchmark"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "protobuf"
|
name = "protobuf"
|
||||||
version = "7.34.0"
|
version = "7.34.1"
|
||||||
description = ""
|
description = ""
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.10"
|
python-versions = ">=3.10"
|
||||||
groups = ["server", "test"]
|
groups = ["server", "test"]
|
||||||
files = [
|
files = [
|
||||||
{file = "protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e329966799f2c271d5e05e236459fe1cbfdb8755aaa3b0914fa60947ddea408"},
|
{file = "protobuf-7.34.1-cp310-abi3-macosx_10_9_universal2.whl", hash = "sha256:d8b2cc79c4d8f62b293ad9b11ec3aebce9af481fa73e64556969f7345ebf9fc7"},
|
||||||
{file = "protobuf-7.34.0-cp310-abi3-manylinux2014_aarch64.whl", hash = "sha256:9d7a5005fb96f3c1e64f397f91500b0eb371b28da81296ae73a6b08a5b76cdd6"},
|
{file = "protobuf-7.34.1-cp310-abi3-manylinux2014_aarch64.whl", hash = "sha256:5185e0e948d07abe94bb76ec9b8416b604cfe5da6f871d67aad30cbf24c3110b"},
|
||||||
{file = "protobuf-7.34.0-cp310-abi3-manylinux2014_s390x.whl", hash = "sha256:4a72a8ec94e7a9f7ef7fe818ed26d073305f347f8b3b5ba31e22f81fd85fca02"},
|
{file = "protobuf-7.34.1-cp310-abi3-manylinux2014_s390x.whl", hash = "sha256:403b093a6e28a960372b44e5eb081775c9b056e816a8029c61231743d63f881a"},
|
||||||
{file = "protobuf-7.34.0-cp310-abi3-manylinux2014_x86_64.whl", hash = "sha256:964cf977e07f479c0697964e83deda72bcbc75c3badab506fb061b352d991b01"},
|
{file = "protobuf-7.34.1-cp310-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ff40ce8cd688f7265326b38d5a1bed9bfdf5e6723d49961432f83e21d5713e4"},
|
||||||
{file = "protobuf-7.34.0-cp310-abi3-win32.whl", hash = "sha256:f791ec509707a1d91bd02e07df157e75e4fb9fbdad12a81b7396201ec244e2e3"},
|
{file = "protobuf-7.34.1-cp310-abi3-win32.whl", hash = "sha256:34b84ce27680df7cca9f231043ada0daa55d0c44a2ddfaa58ec1d0d89d8bf60a"},
|
||||||
{file = "protobuf-7.34.0-cp310-abi3-win_amd64.whl", hash = "sha256:9f9079f1dde4e32342ecbd1c118d76367090d4aaa19da78230c38101c5b3dd40"},
|
{file = "protobuf-7.34.1-cp310-abi3-win_amd64.whl", hash = "sha256:e97b55646e6ce5cbb0954a8c28cd39a5869b59090dfaa7df4598a7fba869468c"},
|
||||||
{file = "protobuf-7.34.0-py3-none-any.whl", hash = "sha256:e3b914dd77fa33fa06ab2baa97937746ab25695f389869afdf03e81f34e45dc7"},
|
{file = "protobuf-7.34.1-py3-none-any.whl", hash = "sha256:bb3812cd53aefea2b028ef42bd780f5b96407247f20c6ef7c679807e9d188f11"},
|
||||||
{file = "protobuf-7.34.0.tar.gz", hash = "sha256:3871a3df67c710aaf7bb8d214cc997342e63ceebd940c8c7fc65c9b3d697591a"},
|
{file = "protobuf-7.34.1.tar.gz", hash = "sha256:9ce42245e704cc5027be797c1db1eb93184d44d1cdd71811fb2d9b25ad541280"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -2014,14 +2120,14 @@ files = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pygments"
|
name = "pygments"
|
||||||
version = "2.19.2"
|
version = "2.20.0"
|
||||||
description = "Pygments is a syntax highlighting package written in Python."
|
description = "Pygments is a syntax highlighting package written in Python."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.8"
|
python-versions = ">=3.9"
|
||||||
groups = ["docs", "test"]
|
groups = ["docs", "test"]
|
||||||
files = [
|
files = [
|
||||||
{file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
|
{file = "pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176"},
|
||||||
{file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
|
{file = "pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
|
|
@ -2133,14 +2239,14 @@ six = ">=1.5"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "python-discovery"
|
name = "python-discovery"
|
||||||
version = "1.1.3"
|
version = "1.2.1"
|
||||||
description = "Python interpreter discovery"
|
description = "Python interpreter discovery"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.8"
|
python-versions = ">=3.8"
|
||||||
groups = ["test"]
|
groups = ["test"]
|
||||||
files = [
|
files = [
|
||||||
{file = "python_discovery-1.1.3-py3-none-any.whl", hash = "sha256:90e795f0121bc84572e737c9aa9966311b9fde44ffb88a5953b3ec9b31c6945e"},
|
{file = "python_discovery-1.2.1-py3-none-any.whl", hash = "sha256:b6a957b24c1cd79252484d3566d1b49527581d46e789aaf43181005e56201502"},
|
||||||
{file = "python_discovery-1.1.3.tar.gz", hash = "sha256:7acca36e818cd88e9b2ba03e045ad7e93e1713e29c6bbfba5d90202310b7baa5"},
|
{file = "python_discovery-1.2.1.tar.gz", hash = "sha256:180c4d114bff1c32462537eac5d6a332b768242b76b69c0259c7d14b1b680c9e"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
|
|
@ -2397,25 +2503,25 @@ typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""}
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "requests"
|
name = "requests"
|
||||||
version = "2.32.5"
|
version = "2.33.1"
|
||||||
description = "Python HTTP for Humans."
|
description = "Python HTTP for Humans."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.9"
|
python-versions = ">=3.10"
|
||||||
groups = ["docs"]
|
groups = ["agent", "docs"]
|
||||||
files = [
|
files = [
|
||||||
{file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"},
|
{file = "requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a"},
|
||||||
{file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"},
|
{file = "requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
certifi = ">=2017.4.17"
|
certifi = ">=2023.5.7"
|
||||||
charset_normalizer = ">=2,<4"
|
charset_normalizer = ">=2,<4"
|
||||||
idna = ">=2.5,<4"
|
idna = ">=2.5,<4"
|
||||||
urllib3 = ">=1.21.1,<3"
|
urllib3 = ">=1.26,<3"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
use-chardet-on-py3 = ["chardet (>=3.0.2,<8)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rpds-py"
|
name = "rpds-py"
|
||||||
|
|
@ -2608,14 +2714,14 @@ test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "sigmf"
|
name = "sigmf"
|
||||||
version = "1.7.0"
|
version = "1.7.2"
|
||||||
description = "Easily interact with Signal Metadata Format (SigMF) recordings."
|
description = "Easily interact with Signal Metadata Format (SigMF) recordings."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
groups = ["main"]
|
groups = ["main"]
|
||||||
files = [
|
files = [
|
||||||
{file = "sigmf-1.7.0-py3-none-any.whl", hash = "sha256:6f0b15f1d93308da485d0ddbcdd85ca5d414f624d3f3524965d8c6701c9b8012"},
|
{file = "sigmf-1.7.2-py3-none-any.whl", hash = "sha256:6599b95e8bd3ac2c568b8ec46c312a77b80868cbda79d729234f396d2194d3d8"},
|
||||||
{file = "sigmf-1.7.0.tar.gz", hash = "sha256:fec920bd2e7d3b798a46ef0bedc6f7a3ca1edfa165c7dde1b2e3d6a9dd14e9be"},
|
{file = "sigmf-1.7.2.tar.gz", hash = "sha256:5f80f7127539358c7528ccf26e0ac5b3c268ecaeb69a921542e8ff71d0c85346"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
|
|
@ -2845,14 +2951,14 @@ test = ["pytest"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "starlette"
|
name = "starlette"
|
||||||
version = "0.52.1"
|
version = "1.0.0"
|
||||||
description = "The little ASGI library that shines."
|
description = "The little ASGI library that shines."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.10"
|
python-versions = ">=3.10"
|
||||||
groups = ["docs", "server", "test"]
|
groups = ["docs", "server", "test"]
|
||||||
files = [
|
files = [
|
||||||
{file = "starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74"},
|
{file = "starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b"},
|
||||||
{file = "starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933"},
|
{file = "starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
|
|
@ -2882,60 +2988,60 @@ dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tomli"
|
name = "tomli"
|
||||||
version = "2.4.0"
|
version = "2.4.1"
|
||||||
description = "A lil' TOML parser"
|
description = "A lil' TOML parser"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.8"
|
python-versions = ">=3.8"
|
||||||
groups = ["dev", "docs", "test"]
|
groups = ["dev", "docs", "test"]
|
||||||
markers = "python_version == \"3.10\""
|
markers = "python_version == \"3.10\""
|
||||||
files = [
|
files = [
|
||||||
{file = "tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867"},
|
{file = "tomli-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8f0fc26ec2cc2b965b7a3b87cd19c5c6b8c5e5f436b984e85f486d652285c30"},
|
||||||
{file = "tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9"},
|
{file = "tomli-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ab97e64ccda8756376892c53a72bd1f964e519c77236368527f758fbc36a53a"},
|
||||||
{file = "tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95"},
|
{file = "tomli-2.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96481a5786729fd470164b47cdb3e0e58062a496f455ee41b4403be77cb5a076"},
|
||||||
{file = "tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76"},
|
{file = "tomli-2.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a881ab208c0baf688221f8cecc5401bd291d67e38a1ac884d6736cbcd8247e9"},
|
||||||
{file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d"},
|
{file = "tomli-2.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47149d5bd38761ac8be13a84864bf0b7b70bc051806bc3669ab1cbc56216b23c"},
|
||||||
{file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576"},
|
{file = "tomli-2.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec9bfaf3ad2df51ace80688143a6a4ebc09a248f6ff781a9945e51937008fcbc"},
|
||||||
{file = "tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a"},
|
{file = "tomli-2.4.1-cp311-cp311-win32.whl", hash = "sha256:ff2983983d34813c1aeb0fa89091e76c3a22889ee83ab27c5eeb45100560c049"},
|
||||||
{file = "tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa"},
|
{file = "tomli-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:5ee18d9ebdb417e384b58fe414e8d6af9f4e7a0ae761519fb50f721de398dd4e"},
|
||||||
{file = "tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614"},
|
{file = "tomli-2.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:c2541745709bad0264b7d4705ad453b76ccd191e64aa6f0fc66b69a293a45ece"},
|
||||||
{file = "tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1"},
|
{file = "tomli-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c742f741d58a28940ce01d58f0ab2ea3ced8b12402f162f4d534dfe18ba1cd6a"},
|
||||||
{file = "tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8"},
|
{file = "tomli-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7f86fd587c4ed9dd76f318225e7d9b29cfc5a9d43de44e5754db8d1128487085"},
|
||||||
{file = "tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a"},
|
{file = "tomli-2.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff18e6a727ee0ab0388507b89d1bc6a22b138d1e2fa56d1ad494586d61d2eae9"},
|
||||||
{file = "tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1"},
|
{file = "tomli-2.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:136443dbd7e1dee43c68ac2694fde36b2849865fa258d39bf822c10e8068eac5"},
|
||||||
{file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b"},
|
{file = "tomli-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e262d41726bc187e69af7825504c933b6794dc3fbd5945e41a79bb14c31f585"},
|
||||||
{file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51"},
|
{file = "tomli-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5cb41aa38891e073ee49d55fbc7839cfdb2bc0e600add13874d048c94aadddd1"},
|
||||||
{file = "tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729"},
|
{file = "tomli-2.4.1-cp312-cp312-win32.whl", hash = "sha256:da25dc3563bff5965356133435b757a795a17b17d01dbc0f42fb32447ddfd917"},
|
||||||
{file = "tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da"},
|
{file = "tomli-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:52c8ef851d9a240f11a88c003eacb03c31fc1c9c4ec64a99a0f922b93874fda9"},
|
||||||
{file = "tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3"},
|
{file = "tomli-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:f758f1b9299d059cc3f6546ae2af89670cb1c4d48ea29c3cacc4fe7de3058257"},
|
||||||
{file = "tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0"},
|
{file = "tomli-2.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36d2bd2ad5fb9eaddba5226aa02c8ec3fa4f192631e347b3ed28186d43be6b54"},
|
||||||
{file = "tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e"},
|
{file = "tomli-2.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb0dc4e38e6a1fd579e5d50369aa2e10acfc9cace504579b2faabb478e76941a"},
|
||||||
{file = "tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4"},
|
{file = "tomli-2.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7f2c7f2b9ca6bdeef8f0fa897f8e05085923eb091721675170254cbc5b02897"},
|
||||||
{file = "tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e"},
|
{file = "tomli-2.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3c6818a1a86dd6dca7ddcaaf76947d5ba31aecc28cb1b67009a5877c9a64f3f"},
|
||||||
{file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c"},
|
{file = "tomli-2.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d312ef37c91508b0ab2cee7da26ec0b3ed2f03ce12bd87a588d771ae15dcf82d"},
|
||||||
{file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f"},
|
{file = "tomli-2.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51529d40e3ca50046d7606fa99ce3956a617f9b36380da3b7f0dd3dd28e68cb5"},
|
||||||
{file = "tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86"},
|
{file = "tomli-2.4.1-cp313-cp313-win32.whl", hash = "sha256:2190f2e9dd7508d2a90ded5ed369255980a1bcdd58e52f7fe24b8162bf9fedbd"},
|
||||||
{file = "tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87"},
|
{file = "tomli-2.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:8d65a2fbf9d2f8352685bc1364177ee3923d6baf5e7f43ea4959d7d8bc326a36"},
|
||||||
{file = "tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132"},
|
{file = "tomli-2.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:4b605484e43cdc43f0954ddae319fb75f04cc10dd80d830540060ee7cd0243cd"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6"},
|
{file = "tomli-2.4.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fd0409a3653af6c147209d267a0e4243f0ae46b011aa978b1080359fddc9b6cf"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc"},
|
{file = "tomli-2.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a120733b01c45e9a0c34aeef92bf0cf1d56cfe81ed9d47d562f9ed591a9828ac"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66"},
|
{file = "tomli-2.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:559db847dc486944896521f68d8190be1c9e719fced785720d2216fe7022b662"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d"},
|
{file = "tomli-2.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01f520d4f53ef97964a240a035ec2a869fe1a37dde002b57ebc4417a27ccd853"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702"},
|
{file = "tomli-2.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7f94b27a62cfad8496c8d2513e1a222dd446f095fca8987fceef261225538a15"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8"},
|
{file = "tomli-2.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ede3e6487c5ef5d28634ba3f31f989030ad6af71edfb0055cbbd14189ff240ba"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776"},
|
{file = "tomli-2.4.1-cp314-cp314-win32.whl", hash = "sha256:3d48a93ee1c9b79c04bb38772ee1b64dcf18ff43085896ea460ca8dec96f35f6"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475"},
|
{file = "tomli-2.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:88dceee75c2c63af144e456745e10101eb67361050196b0b6af5d717254dddf7"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2"},
|
{file = "tomli-2.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:b8c198f8c1805dc42708689ed6864951fd2494f924149d3e4bce7710f8eb5232"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9"},
|
{file = "tomli-2.4.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:d4d8fe59808a54658fcc0160ecfb1b30f9089906c50b23bcb4c69eddc19ec2b4"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0"},
|
{file = "tomli-2.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7008df2e7655c495dd12d2a4ad038ff878d4ca4b81fccaf82b714e07eae4402c"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df"},
|
{file = "tomli-2.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d8591993e228b0c930c4bb0db464bdad97b3289fb981255d6c9a41aedc84b2d"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d"},
|
{file = "tomli-2.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:734e20b57ba95624ecf1841e72b53f6e186355e216e5412de414e3c51e5e3c41"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f"},
|
{file = "tomli-2.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8a650c2dbafa08d42e51ba0b62740dae4ecb9338eefa093aa5c78ceb546fcd5c"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b"},
|
{file = "tomli-2.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:504aa796fe0569bb43171066009ead363de03675276d2d121ac1a4572397870f"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087"},
|
{file = "tomli-2.4.1-cp314-cp314t-win32.whl", hash = "sha256:b1d22e6e9387bf4739fbe23bfa80e93f6b0373a7f1b96c6227c32bef95a4d7a8"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd"},
|
{file = "tomli-2.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2c1c351919aca02858f740c6d33adea0c5deea37f9ecca1cc1ef9e884a619d26"},
|
||||||
{file = "tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4"},
|
{file = "tomli-2.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eab21f45c7f66c13f2a9e0e1535309cee140182a9cdae1e041d02e47291e8396"},
|
||||||
{file = "tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a"},
|
{file = "tomli-2.4.1-py3-none-any.whl", hash = "sha256:0d85819802132122da43cb86656f8d1f8c6587d54ae7dcaf30e90533028b49fe"},
|
||||||
{file = "tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c"},
|
{file = "tomli-2.4.1.tar.gz", hash = "sha256:7c7e1a961a0b2f2472c1ac5b69affa0ae1132c39adcb67aba98568702b9cc23f"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -2964,14 +3070,14 @@ files = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tox"
|
name = "tox"
|
||||||
version = "4.49.1"
|
version = "4.52.0"
|
||||||
description = "tox is a generic virtualenv management and test command line tool"
|
description = "tox is a generic virtualenv management and test command line tool"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.10"
|
python-versions = ">=3.10"
|
||||||
groups = ["test"]
|
groups = ["test"]
|
||||||
files = [
|
files = [
|
||||||
{file = "tox-4.49.1-py3-none-any.whl", hash = "sha256:6dd2d7d4e4fd5895ce4ea20e258fce0d4b81e914b697d116a5ab0365f8303bad"},
|
{file = "tox-4.52.0-py3-none-any.whl", hash = "sha256:624d8ea4a8c6d5e8d168eedf0e318d736fb22e83ca83137d001ac65ffdec46fd"},
|
||||||
{file = "tox-4.49.1.tar.gz", hash = "sha256:4130d02e1d53648d7107d121ed79f69a27b717817c5e9da521d50319dd261212"},
|
{file = "tox-4.52.0.tar.gz", hash = "sha256:6054abf5c8b61d58776fbec991f9bf0d34bb883862beb93d2fe55601ef3977c9"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
|
|
@ -2982,6 +3088,7 @@ packaging = ">=26"
|
||||||
platformdirs = ">=4.9.4"
|
platformdirs = ">=4.9.4"
|
||||||
pluggy = ">=1.6"
|
pluggy = ">=1.6"
|
||||||
pyproject-api = ">=1.10"
|
pyproject-api = ">=1.10"
|
||||||
|
python-discovery = ">=1.2.1"
|
||||||
tomli = {version = ">=2.4", markers = "python_version < \"3.11\""}
|
tomli = {version = ">=2.4", markers = "python_version < \"3.11\""}
|
||||||
tomli-w = ">=1.2"
|
tomli-w = ">=1.2"
|
||||||
typing-extensions = {version = ">=4.15", markers = "python_version < \"3.11\""}
|
typing-extensions = {version = ">=4.15", markers = "python_version < \"3.11\""}
|
||||||
|
|
@ -3036,7 +3143,7 @@ version = "2.6.3"
|
||||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.9"
|
python-versions = ">=3.9"
|
||||||
groups = ["docs"]
|
groups = ["agent", "docs"]
|
||||||
files = [
|
files = [
|
||||||
{file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"},
|
{file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"},
|
||||||
{file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"},
|
{file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"},
|
||||||
|
|
@ -3050,14 +3157,14 @@ zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "uvicorn"
|
name = "uvicorn"
|
||||||
version = "0.41.0"
|
version = "0.42.0"
|
||||||
description = "The lightning-fast ASGI server."
|
description = "The lightning-fast ASGI server."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.10"
|
python-versions = ">=3.10"
|
||||||
groups = ["docs", "server", "test"]
|
groups = ["docs", "server", "test"]
|
||||||
files = [
|
files = [
|
||||||
{file = "uvicorn-0.41.0-py3-none-any.whl", hash = "sha256:29e35b1d2c36a04b9e180d4007ede3bcb32a85fbdfd6c6aeb3f26839de088187"},
|
{file = "uvicorn-0.42.0-py3-none-any.whl", hash = "sha256:96c30f5c7abe6f74ae8900a70e92b85ad6613b745d4879eb9b16ccad15645359"},
|
||||||
{file = "uvicorn-0.41.0.tar.gz", hash = "sha256:09d11cf7008da33113824ee5a1c6422d89fbc2ff476540d69a34c87fab8b571a"},
|
{file = "uvicorn-0.42.0.tar.gz", hash = "sha256:9b1f190ce15a2dd22e7758651d9b6d12df09a13d51ba5bf4fc33c383a48e1775"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
|
|
@ -3355,4 +3462,4 @@ files = [
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.1"
|
lock-version = "2.1"
|
||||||
python-versions = ">=3.10"
|
python-versions = ">=3.10"
|
||||||
content-hash = "69822bd89a48fd8e1cf248818ef298b1c105b975f2f9e912d570fada5e723480"
|
content-hash = "ad11d373747eff2d79496dcd3b575c40d9418dc2d3ad9617eab942285e3768a4"
|
||||||
|
|
|
||||||
|
|
@ -95,6 +95,12 @@ sphinx = "^7.2.6"
|
||||||
sphinx-rtd-theme = "^2.0.0"
|
sphinx-rtd-theme = "^2.0.0"
|
||||||
sphinx-autobuild = "^2024.2.4"
|
sphinx-autobuild = "^2024.2.4"
|
||||||
|
|
||||||
|
[tool.poetry.group.agent]
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[tool.poetry.group.agent.dependencies]
|
||||||
|
requests = ">=2.28,<3.0"
|
||||||
|
|
||||||
[tool.poetry.group.dev.dependencies]
|
[tool.poetry.group.dev.dependencies]
|
||||||
flake8 = "^7.1.0"
|
flake8 = "^7.1.0"
|
||||||
black = "^24.3.0"
|
black = "^24.3.0"
|
||||||
|
|
@ -110,6 +116,7 @@ pylint = "^3.2.6" # For pyreverse, to automate the creation of UML diagrams
|
||||||
ria = "ria_toolkit_oss_cli.cli:cli"
|
ria = "ria_toolkit_oss_cli.cli:cli"
|
||||||
ria-tools = "ria_toolkit_oss_cli.cli:cli"
|
ria-tools = "ria_toolkit_oss_cli.cli:cli"
|
||||||
ria-server = "ria_toolkit_oss.server.cli:serve"
|
ria-server = "ria_toolkit_oss.server.cli:serve"
|
||||||
|
ria-agent = "ria_toolkit_oss.agent:main"
|
||||||
|
|
||||||
[tool.poetry.group.server.dependencies]
|
[tool.poetry.group.server.dependencies]
|
||||||
fastapi = ">=0.111,<1.0"
|
fastapi = ">=0.111,<1.0"
|
||||||
|
|
|
||||||
462
src/ria_toolkit_oss/agent.py
Normal file
462
src/ria_toolkit_oss/agent.py
Normal file
|
|
@ -0,0 +1,462 @@
|
||||||
|
"""RT-OSS Node Agent — connects to RIA Hub and dispatches work to local hardware.
|
||||||
|
|
||||||
|
The agent runs on any machine with an SDR attached and connects **outbound** to
|
||||||
|
RIA Hub. No inbound ports need to be opened on the user's machine, and the
|
||||||
|
connection works identically through NAT, corporate firewalls, or a Pi on a
|
||||||
|
cellular link.
|
||||||
|
|
||||||
|
Usage::
|
||||||
|
|
||||||
|
ria-agent \\
|
||||||
|
--hub https://riahub.company.com \\
|
||||||
|
--key <api-key> \\
|
||||||
|
--name lab-bench-1 \\
|
||||||
|
[--device plutosdr] \\
|
||||||
|
[--insecure]
|
||||||
|
|
||||||
|
The agent:
|
||||||
|
1. Registers with RIA Hub and receives a ``node_id``.
|
||||||
|
2. Sends a heartbeat every 30 s so the hub knows it is online.
|
||||||
|
3. Long-polls ``GET /orchestrator/nodes/{id}/commands`` (30 s timeout).
|
||||||
|
4. Executes received campaigns via :class:`ria_toolkit_oss.orchestration.executor.CampaignExecutor`.
|
||||||
|
5. Uploads recordings to the hub via chunked POST, keeping each request
|
||||||
|
under 50 MB so it passes through Cloudflare without needing the bypass
|
||||||
|
subdomain.
|
||||||
|
6. Deregisters cleanly on SIGINT / SIGTERM.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import math
|
||||||
|
import os
|
||||||
|
import signal
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
logger = logging.getLogger("ria_agent")
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Tuneable constants
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
_HEARTBEAT_INTERVAL = 30 # seconds between heartbeats
|
||||||
|
_POLL_TIMEOUT = 30 # server-side long-poll duration
|
||||||
|
_POLL_CLIENT_TIMEOUT = 40 # client read timeout — slightly longer than server
|
||||||
|
_RECONNECT_PAUSE = 5 # seconds to wait after a poll error before retrying
|
||||||
|
_CHUNK_SIZE = 50 * 1024 * 1024 # 50 MB — well below Cloudflare's 100 MB limit
|
||||||
|
_DIRECT_THRESHOLD = 90 * 1024 * 1024 # files above this use chunked upload
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Agent
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class NodeAgent:
|
||||||
|
"""Outbound-connecting agent that bridges RIA Hub to local SDR hardware.
|
||||||
|
|
||||||
|
All network I/O is initiated by the agent (outbound). RIA Hub never opens
|
||||||
|
a connection back to the agent's machine.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
hub_url: str,
|
||||||
|
api_key: str,
|
||||||
|
name: str,
|
||||||
|
sdr_device: str = "unknown",
|
||||||
|
insecure: bool = False,
|
||||||
|
) -> None:
|
||||||
|
self.hub_url = hub_url.rstrip("/")
|
||||||
|
self.api_key = api_key
|
||||||
|
self.name = name
|
||||||
|
self.sdr_device = sdr_device
|
||||||
|
self.insecure = insecure
|
||||||
|
|
||||||
|
self.node_id: str | None = None
|
||||||
|
self._stop = threading.Event()
|
||||||
|
|
||||||
|
try:
|
||||||
|
import ria_toolkit_oss
|
||||||
|
|
||||||
|
self._ria_version: str = getattr(ria_toolkit_oss, "__version__", "unknown")
|
||||||
|
except Exception:
|
||||||
|
self._ria_version = "unknown"
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Public entry point
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def run(self) -> None:
|
||||||
|
"""Register, start the heartbeat thread, and enter the command loop.
|
||||||
|
|
||||||
|
Blocks until SIGINT or SIGTERM is received.
|
||||||
|
"""
|
||||||
|
self._register()
|
||||||
|
|
||||||
|
def _shutdown(sig: int, _frame: Any) -> None:
|
||||||
|
logger.info("Shutdown signal received — stopping agent")
|
||||||
|
self._stop.set()
|
||||||
|
|
||||||
|
signal.signal(signal.SIGINT, _shutdown)
|
||||||
|
signal.signal(signal.SIGTERM, _shutdown)
|
||||||
|
|
||||||
|
hb = threading.Thread(target=self._heartbeat_loop, daemon=True, name="ria-agent-heartbeat")
|
||||||
|
hb.start()
|
||||||
|
|
||||||
|
logger.info("Agent %r online (node_id=%s, hub=%s)", self.name, self.node_id, self.hub_url)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self._command_loop()
|
||||||
|
finally:
|
||||||
|
self._stop.set()
|
||||||
|
self._deregister()
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Registration
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _register(self) -> None:
|
||||||
|
resp = self._post(
|
||||||
|
"/orchestrator/nodes/register",
|
||||||
|
json={
|
||||||
|
"name": self.name,
|
||||||
|
"sdr_device": self.sdr_device,
|
||||||
|
"ria_toolkit_version": self._ria_version,
|
||||||
|
"capabilities": ["inference", "campaign"],
|
||||||
|
},
|
||||||
|
timeout=15,
|
||||||
|
)
|
||||||
|
resp.raise_for_status()
|
||||||
|
self.node_id = resp.json()["node_id"]
|
||||||
|
logger.info("Registered as %r (node_id=%s)", self.name, self.node_id)
|
||||||
|
|
||||||
|
def _deregister(self) -> None:
|
||||||
|
if not self.node_id:
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
self._delete(f"/orchestrator/nodes/{self.node_id}", timeout=10)
|
||||||
|
logger.info("Deregistered %s", self.node_id)
|
||||||
|
except Exception as exc:
|
||||||
|
logger.debug("Deregister failed (ignored on shutdown): %s", exc)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Heartbeat thread
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _heartbeat_loop(self) -> None:
|
||||||
|
while not self._stop.wait(_HEARTBEAT_INTERVAL):
|
||||||
|
try:
|
||||||
|
resp = self._post(f"/orchestrator/nodes/{self.node_id}/heartbeat", timeout=10)
|
||||||
|
if resp.status_code == 404:
|
||||||
|
logger.warning("Heartbeat got 404 — hub lost registration, re-registering")
|
||||||
|
self._register()
|
||||||
|
except Exception as exc:
|
||||||
|
logger.warning("Heartbeat failed: %s", exc)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Command poll loop
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _command_loop(self) -> None:
|
||||||
|
while not self._stop.is_set():
|
||||||
|
try:
|
||||||
|
resp = self._get(
|
||||||
|
f"/orchestrator/nodes/{self.node_id}/commands",
|
||||||
|
timeout=_POLL_CLIENT_TIMEOUT,
|
||||||
|
)
|
||||||
|
if resp.status_code == 204:
|
||||||
|
# No command within the timeout window — loop immediately.
|
||||||
|
continue
|
||||||
|
if resp.status_code == 404:
|
||||||
|
logger.warning("Command poll got 404 — re-registering")
|
||||||
|
self._register()
|
||||||
|
continue
|
||||||
|
resp.raise_for_status()
|
||||||
|
cmd = resp.json()
|
||||||
|
logger.info("Received command: %s", cmd.get("command"))
|
||||||
|
self._dispatch(cmd)
|
||||||
|
except Exception as exc:
|
||||||
|
if not self._stop.is_set():
|
||||||
|
logger.warning("Command poll error: %s — retrying in %ds", exc, _RECONNECT_PAUSE)
|
||||||
|
time.sleep(_RECONNECT_PAUSE)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Command dispatch
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _dispatch(self, cmd: dict) -> None:
|
||||||
|
command = cmd.get("command")
|
||||||
|
if command == "run_campaign":
|
||||||
|
campaign_id: str = cmd.get("campaign_id") or str(uuid.uuid4())
|
||||||
|
config_dict: dict = cmd.get("payload") or {}
|
||||||
|
threading.Thread(
|
||||||
|
target=self._run_campaign,
|
||||||
|
args=(campaign_id, config_dict),
|
||||||
|
daemon=True,
|
||||||
|
name=f"campaign-{campaign_id[:8]}",
|
||||||
|
).start()
|
||||||
|
else:
|
||||||
|
logger.warning("Unknown command %r — ignored", command)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Campaign execution
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _run_campaign(self, campaign_id: str, config_dict: dict) -> None:
|
||||||
|
try:
|
||||||
|
from ria_toolkit_oss.orchestration.campaign import CampaignConfig
|
||||||
|
from ria_toolkit_oss.orchestration.executor import CampaignExecutor
|
||||||
|
except ImportError as exc:
|
||||||
|
logger.error(
|
||||||
|
"Campaign %s cannot start — ria_toolkit_oss not fully installed: %s",
|
||||||
|
campaign_id[:8],
|
||||||
|
exc,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("Campaign %s starting", campaign_id[:8])
|
||||||
|
try:
|
||||||
|
config = CampaignConfig.from_dict(config_dict)
|
||||||
|
executor = CampaignExecutor(config)
|
||||||
|
result = executor.run()
|
||||||
|
logger.info("Campaign %s completed — uploading recordings", campaign_id[:8])
|
||||||
|
self._upload_recordings(campaign_id, config, result)
|
||||||
|
except Exception as exc:
|
||||||
|
logger.error("Campaign %s failed: %s", campaign_id[:8], exc)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Recording upload (chunked for large files)
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _upload_recordings(self, campaign_id: str, config: Any, result: Any) -> None:
|
||||||
|
output_repo: str | None = getattr(getattr(config, "output", None), "repo", None)
|
||||||
|
if not output_repo or "/" not in output_repo:
|
||||||
|
logger.warning("Campaign %s: no output.repo — skipping upload", campaign_id[:8])
|
||||||
|
return
|
||||||
|
|
||||||
|
repo_owner, repo_name = output_repo.split("/", 1)
|
||||||
|
base_url = f"{self.hub_url}/datasets/upload"
|
||||||
|
steps = getattr(result, "steps", None) or []
|
||||||
|
|
||||||
|
for step in steps:
|
||||||
|
output_path: str | None = getattr(step, "output_path", None)
|
||||||
|
if not output_path:
|
||||||
|
continue
|
||||||
|
device_id: str = getattr(step, "transmitter_id", "") or ""
|
||||||
|
for fpath in _sigmf_files(output_path):
|
||||||
|
filename = os.path.basename(fpath)
|
||||||
|
metadata = {
|
||||||
|
"filename": filename,
|
||||||
|
"repo_owner": repo_owner,
|
||||||
|
"repo_name": repo_name,
|
||||||
|
"device_id": device_id,
|
||||||
|
"campaign_id": campaign_id,
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
resp_data = self._upload_file(base_url, fpath, metadata)
|
||||||
|
logger.info(
|
||||||
|
"Campaign %s: uploaded %s (oid=%s)",
|
||||||
|
campaign_id[:8],
|
||||||
|
filename,
|
||||||
|
resp_data.get("oid", "?"),
|
||||||
|
)
|
||||||
|
except Exception as exc:
|
||||||
|
logger.warning("Campaign %s: upload of %s failed: %s", campaign_id[:8], filename, exc)
|
||||||
|
|
||||||
|
def _upload_file(self, base_url: str, file_path: str, metadata: dict) -> dict:
|
||||||
|
"""Upload *file_path*, choosing chunked or direct path based on file size."""
|
||||||
|
import requests as _requests
|
||||||
|
|
||||||
|
size = os.path.getsize(file_path)
|
||||||
|
filename = os.path.basename(file_path)
|
||||||
|
headers = {"X-API-Key": self.api_key}
|
||||||
|
verify = not self.insecure
|
||||||
|
|
||||||
|
# Small files: single POST (unchanged endpoint, no assembly needed server-side).
|
||||||
|
if size <= _DIRECT_THRESHOLD:
|
||||||
|
with open(file_path, "rb") as fh:
|
||||||
|
resp = _requests.post(
|
||||||
|
base_url,
|
||||||
|
headers=headers,
|
||||||
|
files={"file": (filename, fh)},
|
||||||
|
data=metadata,
|
||||||
|
timeout=300,
|
||||||
|
verify=verify,
|
||||||
|
)
|
||||||
|
resp.raise_for_status()
|
||||||
|
return resp.json()
|
||||||
|
|
||||||
|
# Large files: chunked upload — each request is ≤ 50 MB.
|
||||||
|
total_chunks = math.ceil(size / _CHUNK_SIZE)
|
||||||
|
upload_id = str(uuid.uuid4())
|
||||||
|
chunk_url = base_url + "/chunk"
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"Chunked upload: %s (%d bytes, %d × %d MB chunks)",
|
||||||
|
filename,
|
||||||
|
size,
|
||||||
|
total_chunks,
|
||||||
|
_CHUNK_SIZE // (1024 * 1024),
|
||||||
|
)
|
||||||
|
|
||||||
|
resp_data: dict = {}
|
||||||
|
with open(file_path, "rb") as fh:
|
||||||
|
for i in range(total_chunks):
|
||||||
|
chunk = fh.read(_CHUNK_SIZE)
|
||||||
|
resp = _requests.post(
|
||||||
|
chunk_url,
|
||||||
|
headers=headers,
|
||||||
|
files={"file": (filename, chunk, "application/octet-stream")},
|
||||||
|
data={
|
||||||
|
**metadata,
|
||||||
|
"upload_id": upload_id,
|
||||||
|
"chunk_index": i,
|
||||||
|
"total_chunks": total_chunks,
|
||||||
|
},
|
||||||
|
timeout=120,
|
||||||
|
verify=verify,
|
||||||
|
)
|
||||||
|
if not resp.ok:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"Chunk {i + 1}/{total_chunks} failed: " f"HTTP {resp.status_code}: {resp.text[:300]}"
|
||||||
|
)
|
||||||
|
resp_data = resp.json()
|
||||||
|
logger.debug("Chunk %d/%d uploaded", i + 1, total_chunks)
|
||||||
|
|
||||||
|
return resp_data
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# HTTP helpers
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _get(self, path: str, **kwargs: Any):
|
||||||
|
import requests as _requests
|
||||||
|
|
||||||
|
return _requests.get(
|
||||||
|
f"{self.hub_url}{path}",
|
||||||
|
headers={"X-API-Key": self.api_key},
|
||||||
|
verify=not self.insecure,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _post(self, path: str, **kwargs: Any):
|
||||||
|
import requests as _requests
|
||||||
|
|
||||||
|
return _requests.post(
|
||||||
|
f"{self.hub_url}{path}",
|
||||||
|
headers={"X-API-Key": self.api_key},
|
||||||
|
verify=not self.insecure,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _delete(self, path: str, **kwargs: Any):
|
||||||
|
import requests as _requests
|
||||||
|
|
||||||
|
return _requests.delete(
|
||||||
|
f"{self.hub_url}{path}",
|
||||||
|
headers={"X-API-Key": self.api_key},
|
||||||
|
verify=not self.insecure,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _sigmf_files(data_path: str) -> list[str]:
|
||||||
|
"""Return paths to both SigMF files (.sigmf-data and .sigmf-meta) for a recording."""
|
||||||
|
candidates = [data_path]
|
||||||
|
if data_path.endswith(".sigmf-data"):
|
||||||
|
candidates.append(data_path[: -len(".sigmf-data")] + ".sigmf-meta")
|
||||||
|
return [p for p in candidates if os.path.exists(p)]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# CLI entry point
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
prog="ria-agent",
|
||||||
|
description=(
|
||||||
|
"RT-OSS Node Agent — connects outbound to RIA Hub and executes "
|
||||||
|
"campaigns / inference on local SDR hardware."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--hub",
|
||||||
|
required=True,
|
||||||
|
metavar="URL",
|
||||||
|
help="RIA Hub base URL, e.g. https://riahub.company.com",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--key",
|
||||||
|
required=True,
|
||||||
|
metavar="API_KEY",
|
||||||
|
help="Shared API key (must match [wac] API_KEY in the hub's app.ini)",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--name",
|
||||||
|
required=True,
|
||||||
|
metavar="NAME",
|
||||||
|
help='Human-readable name shown in the Target Node dropdown, e.g. "lab-bench-1"',
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--device",
|
||||||
|
default="unknown",
|
||||||
|
metavar="SDR",
|
||||||
|
help=(
|
||||||
|
"SDR device type reported to the hub (informational only). "
|
||||||
|
"Examples: plutosdr, usrp_b210, rtlsdr, mock. Default: unknown"
|
||||||
|
),
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--insecure",
|
||||||
|
action="store_true",
|
||||||
|
help="Disable TLS certificate verification (dev/self-signed certs only)",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--log-level",
|
||||||
|
default="INFO",
|
||||||
|
choices=["DEBUG", "INFO", "WARNING", "ERROR"],
|
||||||
|
help="Logging verbosity (default: INFO)",
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
logging.basicConfig(
|
||||||
|
level=getattr(logging, args.log_level),
|
||||||
|
format="%(asctime)s [%(levelname)s] %(name)s: %(message)s",
|
||||||
|
datefmt="%Y-%m-%d %H:%M:%S",
|
||||||
|
stream=sys.stderr,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Warn loudly if --insecure is used outside of development.
|
||||||
|
if args.insecure:
|
||||||
|
logger.warning(
|
||||||
|
"--insecure disables TLS certificate verification. "
|
||||||
|
"Only use this for local development with self-signed certs."
|
||||||
|
)
|
||||||
|
|
||||||
|
agent = NodeAgent(
|
||||||
|
hub_url=args.hub,
|
||||||
|
api_key=args.key,
|
||||||
|
name=args.name,
|
||||||
|
sdr_device=args.device,
|
||||||
|
insecure=args.insecure,
|
||||||
|
)
|
||||||
|
agent.run()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
@ -109,14 +109,11 @@ def copy_file(original_source: str | os.PathLike, new_source: str | os.PathLike)
|
||||||
|
|
||||||
:return: None
|
:return: None
|
||||||
"""
|
"""
|
||||||
original_file = h5py.File(original_source, "r")
|
with h5py.File(original_source, "r") as original_file:
|
||||||
|
|
||||||
with h5py.File(new_source, "w") as new_file:
|
with h5py.File(new_source, "w") as new_file:
|
||||||
for key in original_file.keys():
|
for key in original_file.keys():
|
||||||
original_file.copy(key, new_file)
|
original_file.copy(key, new_file)
|
||||||
|
|
||||||
original_file.close()
|
|
||||||
|
|
||||||
|
|
||||||
def make_empty_clone(original_source: str | os.PathLike, new_source: str | os.PathLike, example_length: int) -> None:
|
def make_empty_clone(original_source: str | os.PathLike, new_source: str | os.PathLike, example_length: int) -> None:
|
||||||
"""Creates a new HDF5 file with the same structure but will leave metadata and dataset empty for operations.
|
"""Creates a new HDF5 file with the same structure but will leave metadata and dataset empty for operations.
|
||||||
|
|
@ -218,4 +215,3 @@ def overwrite_file(source: str | os.PathLike, new_data: np.ndarray) -> None:
|
||||||
ds_name = tuple(f.keys())[0]
|
ds_name = tuple(f.keys())[0]
|
||||||
del f[ds_name]
|
del f[ds_name]
|
||||||
f.create_dataset(ds_name, data=new_data)
|
f.create_dataset(ds_name, data=new_data)
|
||||||
f.close()
|
|
||||||
|
|
|
||||||
|
|
@ -169,8 +169,10 @@ class IQDataset(RadioDataset, ABC):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if split_factor is not None and example_length is not None:
|
if split_factor is not None and example_length is not None:
|
||||||
# Raise warning and use split factor
|
# Warn and use split factor
|
||||||
raise Warning("split_factor and example_length should not both be specified.")
|
import warnings
|
||||||
|
|
||||||
|
warnings.warn("split_factor and example_length should not both be specified.")
|
||||||
|
|
||||||
if not inplace:
|
if not inplace:
|
||||||
# ds = self.create_new_dataset(example_length=example_length)
|
# ds = self.create_new_dataset(example_length=example_length)
|
||||||
|
|
|
||||||
|
|
@ -393,6 +393,7 @@ class Recording:
|
||||||
"""
|
"""
|
||||||
if key not in self.metadata:
|
if key not in self.metadata:
|
||||||
self.add_to_metadata(key=key, value=value)
|
self.add_to_metadata(key=key, value=value)
|
||||||
|
return
|
||||||
|
|
||||||
if not _is_jsonable(value):
|
if not _is_jsonable(value):
|
||||||
raise ValueError("Value must be JSON serializable.")
|
raise ValueError("Value must be JSON serializable.")
|
||||||
|
|
@ -601,7 +602,7 @@ class Recording:
|
||||||
>>> recording = Recording(data=samples, metadata=metadata)
|
>>> recording = Recording(data=samples, metadata=metadata)
|
||||||
>>> recording.to_wav()
|
>>> recording.to_wav()
|
||||||
"""
|
"""
|
||||||
from utils.io.recording import to_wav
|
from ria_toolkit_oss.io.recording import to_wav
|
||||||
|
|
||||||
return to_wav(
|
return to_wav(
|
||||||
recording=self,
|
recording=self,
|
||||||
|
|
@ -651,7 +652,7 @@ class Recording:
|
||||||
>>> recording = Recording(data=samples, metadata=metadata)
|
>>> recording = Recording(data=samples, metadata=metadata)
|
||||||
>>> recording.to_blue()
|
>>> recording.to_blue()
|
||||||
"""
|
"""
|
||||||
from utils.io.recording import to_blue
|
from ria_toolkit_oss.io.recording import to_blue
|
||||||
|
|
||||||
return to_blue(recording=self, filename=filename, path=path, data_format=data_format, overwrite=overwrite)
|
return to_blue(recording=self, filename=filename, path=path, data_format=data_format, overwrite=overwrite)
|
||||||
|
|
||||||
|
|
@ -702,7 +703,14 @@ class Recording:
|
||||||
data = self.data[:, start_sample:end_sample]
|
data = self.data[:, start_sample:end_sample]
|
||||||
|
|
||||||
new_annotations = copy.deepcopy(self.annotations)
|
new_annotations = copy.deepcopy(self.annotations)
|
||||||
|
trimmed_annotations = []
|
||||||
for annotation in new_annotations:
|
for annotation in new_annotations:
|
||||||
|
# skip annotations entirely outside the trim window
|
||||||
|
if annotation.sample_start + annotation.sample_count <= start_sample:
|
||||||
|
continue
|
||||||
|
if annotation.sample_start >= end_sample:
|
||||||
|
continue
|
||||||
|
|
||||||
# trim annotation if it goes outside the trim boundaries
|
# trim annotation if it goes outside the trim boundaries
|
||||||
if annotation.sample_start < start_sample:
|
if annotation.sample_start < start_sample:
|
||||||
annotation.sample_count = annotation.sample_count - (start_sample - annotation.sample_start)
|
annotation.sample_count = annotation.sample_count - (start_sample - annotation.sample_start)
|
||||||
|
|
@ -713,8 +721,9 @@ class Recording:
|
||||||
|
|
||||||
# shift annotation to align with the new start point
|
# shift annotation to align with the new start point
|
||||||
annotation.sample_start = annotation.sample_start - start_sample
|
annotation.sample_start = annotation.sample_start - start_sample
|
||||||
|
trimmed_annotations.append(annotation)
|
||||||
|
|
||||||
return Recording(data=data, metadata=self.metadata, annotations=new_annotations)
|
return Recording(data=data, metadata=self.metadata, annotations=trimmed_annotations)
|
||||||
|
|
||||||
def normalize(self) -> Recording:
|
def normalize(self) -> Recording:
|
||||||
"""Scale the recording data, relative to its maximum value, so that the magnitude of the maximum sample is 1.
|
"""Scale the recording data, relative to its maximum value, so that the magnitude of the maximum sample is 1.
|
||||||
|
|
@ -743,7 +752,10 @@ class Recording:
|
||||||
>>> print(numpy.max(numpy.abs(normalized_recording.data)))
|
>>> print(numpy.max(numpy.abs(normalized_recording.data)))
|
||||||
1
|
1
|
||||||
"""
|
"""
|
||||||
scaled_data = self.data / np.max(abs(self.data))
|
max_val = np.max(abs(self.data))
|
||||||
|
if max_val == 0:
|
||||||
|
raise ValueError("Cannot normalize a recording with all-zero data.")
|
||||||
|
scaled_data = self.data / max_val
|
||||||
return Recording(data=scaled_data, metadata=self.metadata, annotations=self.annotations)
|
return Recording(data=scaled_data, metadata=self.metadata, annotations=self.annotations)
|
||||||
|
|
||||||
def __len__(self) -> int:
|
def __len__(self) -> int:
|
||||||
|
|
|
||||||
|
|
@ -4,10 +4,12 @@ Utilities for input/output operations on the ria_toolkit_oss.datatypes.Recording
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import datetime as dt
|
import datetime as dt
|
||||||
|
import json
|
||||||
import numbers
|
import numbers
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import struct
|
import struct
|
||||||
|
import warnings
|
||||||
from datetime import timezone
|
from datetime import timezone
|
||||||
from typing import Any, List, Optional
|
from typing import Any, List, Optional
|
||||||
|
|
||||||
|
|
@ -91,15 +93,35 @@ def to_npy(
|
||||||
metadata = recording.metadata
|
metadata = recording.metadata
|
||||||
annotations = recording.annotations
|
annotations = recording.annotations
|
||||||
|
|
||||||
with open(file=fullpath, mode="wb") as f:
|
# Serialize metadata and annotations as JSON to avoid pickle-based deserialization.
|
||||||
np.save(f, data)
|
# JSON is safe; pickle allows arbitrary code execution when loading untrusted files.
|
||||||
np.save(f, metadata)
|
metadata_bytes = json.dumps(convert_to_serializable(metadata)).encode()
|
||||||
np.save(f, annotations)
|
annotations_bytes = json.dumps([a.__dict__ for a in annotations]).encode()
|
||||||
|
|
||||||
|
with open(file=fullpath, mode="wb") as f:
|
||||||
|
# Write format version marker first so from_npy can detect the safe JSON format.
|
||||||
|
np.save(f, np.array("ria-toolkit-oss-v2"))
|
||||||
|
np.save(f, data)
|
||||||
|
np.save(f, np.frombuffer(metadata_bytes, dtype=np.uint8))
|
||||||
|
np.save(f, np.frombuffer(annotations_bytes, dtype=np.uint8))
|
||||||
|
|
||||||
# print(f"Saved recording to {os.getcwd()}/{fullpath}")
|
|
||||||
return str(fullpath)
|
return str(fullpath)
|
||||||
|
|
||||||
|
|
||||||
|
_NPY_MAGIC = b"\x93NUMPY"
|
||||||
|
|
||||||
|
|
||||||
|
def _check_npy_magic(filepath: str) -> None:
|
||||||
|
"""Raise ValueError if the file does not start with the NumPy magic bytes."""
|
||||||
|
try:
|
||||||
|
with open(filepath, "rb") as f:
|
||||||
|
header = f.read(6)
|
||||||
|
except OSError as e:
|
||||||
|
raise IOError(f"Cannot open file for validation: {filepath}") from e
|
||||||
|
if header != _NPY_MAGIC:
|
||||||
|
raise ValueError(f"File does not appear to be a valid NumPy .npy file (bad magic bytes): {filepath}")
|
||||||
|
|
||||||
|
|
||||||
def from_npy(file: os.PathLike | str, legacy: bool = False) -> Recording:
|
def from_npy(file: os.PathLike | str, legacy: bool = False) -> Recording:
|
||||||
"""Load a recording from a ``.npy`` binary file.
|
"""Load a recording from a ``.npy`` binary file.
|
||||||
|
|
||||||
|
|
@ -126,10 +148,33 @@ def from_npy(file: os.PathLike | str, legacy: bool = False) -> Recording:
|
||||||
if legacy:
|
if legacy:
|
||||||
return from_npy_legacy(filename)
|
return from_npy_legacy(filename)
|
||||||
|
|
||||||
|
_check_npy_magic(filename)
|
||||||
|
|
||||||
with open(file=filename, mode="rb") as f:
|
with open(file=filename, mode="rb") as f:
|
||||||
data = np.load(f, allow_pickle=True)
|
first = np.load(f, allow_pickle=False)
|
||||||
metadata = np.load(f, allow_pickle=True)
|
|
||||||
metadata = metadata.tolist()
|
if first.ndim == 0 and first.dtype.kind in ("U", "S") and str(first) == "ria-toolkit-oss-v2":
|
||||||
|
# Safe JSON format written by current to_npy.
|
||||||
|
data = np.load(f, allow_pickle=False)
|
||||||
|
raw_meta = np.load(f, allow_pickle=False)
|
||||||
|
metadata = json.loads(raw_meta.tobytes().decode())
|
||||||
|
try:
|
||||||
|
raw_ann = np.load(f, allow_pickle=False)
|
||||||
|
ann_list = json.loads(raw_ann.tobytes().decode())
|
||||||
|
from ria_toolkit_oss.datatypes.annotation import Annotation
|
||||||
|
|
||||||
|
annotations = [Annotation(**a) for a in ann_list]
|
||||||
|
except EOFError:
|
||||||
|
annotations = []
|
||||||
|
else:
|
||||||
|
# Legacy pickle-based format. Only load files from trusted sources.
|
||||||
|
warnings.warn(
|
||||||
|
"Loading .npy file in legacy pickle format — only load files from trusted sources. "
|
||||||
|
"Re-save with to_npy() to upgrade to the safe JSON format.",
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
|
data = first # already loaded without pickle (numeric array)
|
||||||
|
metadata = np.load(f, allow_pickle=True).tolist()
|
||||||
try:
|
try:
|
||||||
annotations = list(np.load(f, allow_pickle=True))
|
annotations = list(np.load(f, allow_pickle=True))
|
||||||
except EOFError:
|
except EOFError:
|
||||||
|
|
@ -171,14 +216,20 @@ def from_npy_legacy(file: os.PathLike | str) -> Recording:
|
||||||
# Rebuild with .npy extension.
|
# Rebuild with .npy extension.
|
||||||
filename = str(filename) + ".npy"
|
filename = str(filename) + ".npy"
|
||||||
|
|
||||||
|
warnings.warn(
|
||||||
|
"from_npy_legacy uses pickle deserialization for extended metadata — only load files from trusted sources.",
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
|
_check_npy_magic(filename)
|
||||||
|
|
||||||
with open(filename, "rb") as f:
|
with open(filename, "rb") as f:
|
||||||
# Read IQ data (2, N) format
|
# Read IQ data (2, N) format
|
||||||
iqdata = np.load(f)
|
iqdata = np.load(f, allow_pickle=False)
|
||||||
|
|
||||||
# Read basic metadata array [center_freq, rec_length, decimation, sample_rate]
|
# Read basic metadata array [center_freq, rec_length, decimation, sample_rate]
|
||||||
meta = np.load(f)
|
meta = np.load(f, allow_pickle=False)
|
||||||
|
|
||||||
# Read extended metadata dict
|
# Read extended metadata dict (legacy format requires pickle)
|
||||||
extended_meta = np.load(f, allow_pickle=True)[0]
|
extended_meta = np.load(f, allow_pickle=True)[0]
|
||||||
|
|
||||||
# Convert IQ data from (2, N) to (N,) complex format
|
# Convert IQ data from (2, N) to (N,) complex format
|
||||||
|
|
@ -351,7 +402,7 @@ def from_sigmf(file: os.PathLike | str) -> Recording:
|
||||||
# Process core keys
|
# Process core keys
|
||||||
if key.startswith("core:"):
|
if key.startswith("core:"):
|
||||||
base_key = key[5:] # Remove 'core:' prefix
|
base_key = key[5:] # Remove 'core:' prefix
|
||||||
converted_key = SIGMF_KEY_CONVERSION.get(base_key, base_key)
|
converted_key = SIGMF_KEY_CONVERSION.get(key, base_key)
|
||||||
# Process ria keys
|
# Process ria keys
|
||||||
elif key.startswith("ria:"):
|
elif key.startswith("ria:"):
|
||||||
converted_key = key[4:] # Remove 'ria:' prefix
|
converted_key = key[4:] # Remove 'ria:' prefix
|
||||||
|
|
|
||||||
|
|
@ -9,6 +9,15 @@ from typing import Optional
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
|
# Allowed characters in campaign names when used as filename components.
|
||||||
|
_SAFE_NAME_RE = re.compile(r"[^a-zA-Z0-9_\-]")
|
||||||
|
|
||||||
|
# Reasonable RF bounds for consumer/research SDR hardware.
|
||||||
|
_FREQ_MIN_HZ = 1.0 # 1 Hz
|
||||||
|
_FREQ_MAX_HZ = 300e9 # 300 GHz
|
||||||
|
_GAIN_MIN_DB = -30.0
|
||||||
|
_GAIN_MAX_DB = 120.0
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Parsing helpers
|
# Parsing helpers
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|
@ -48,21 +57,40 @@ def parse_frequency(value: str | float | int) -> float:
|
||||||
2.45e9 (numeric) → 2_450_000_000.0
|
2.45e9 (numeric) → 2_450_000_000.0
|
||||||
"""
|
"""
|
||||||
if isinstance(value, (int, float)):
|
if isinstance(value, (int, float)):
|
||||||
return float(value)
|
result = float(value)
|
||||||
|
if not (_FREQ_MIN_HZ <= result <= _FREQ_MAX_HZ):
|
||||||
|
raise ValueError(
|
||||||
|
f"Frequency {result:.3g} Hz is outside the supported range "
|
||||||
|
f"({_FREQ_MIN_HZ:.0f} Hz – {_FREQ_MAX_HZ:.3g} Hz)"
|
||||||
|
)
|
||||||
|
return result
|
||||||
value = str(value).strip()
|
value = str(value).strip()
|
||||||
|
|
||||||
# Try bare numeric first (handles scientific notation like "915e6")
|
# Try bare numeric first (handles scientific notation like "915e6")
|
||||||
try:
|
try:
|
||||||
return float(value)
|
result = float(value)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
|
else:
|
||||||
|
if not (_FREQ_MIN_HZ <= result <= _FREQ_MAX_HZ):
|
||||||
|
raise ValueError(
|
||||||
|
f"Frequency {result:.3g} Hz is outside the supported range "
|
||||||
|
f"({_FREQ_MIN_HZ:.0f} Hz – {_FREQ_MAX_HZ:.3g} Hz): '{value}'"
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
# Handle suffix notation: "2.45GHz", "40MHz", "40M", "433k"
|
# Handle suffix notation: "2.45GHz", "40MHz", "40M", "433k"
|
||||||
match = re.fullmatch(r"([\d.]+)\s*(k|M|G)(?:\s*Hz?)?", value, re.IGNORECASE)
|
match = re.fullmatch(r"([\d.]+)\s*(k|M|G)(?:\s*Hz?)?", value, re.IGNORECASE)
|
||||||
if match:
|
if match:
|
||||||
amount = float(match.group(1))
|
amount = float(match.group(1))
|
||||||
suffix = match.group(2).upper()
|
suffix = match.group(2).upper()
|
||||||
return amount * {"K": 1e3, "M": 1e6, "G": 1e9}[suffix]
|
result = amount * {"K": 1e3, "M": 1e6, "G": 1e9}[suffix]
|
||||||
|
if not (_FREQ_MIN_HZ <= result <= _FREQ_MAX_HZ):
|
||||||
|
raise ValueError(
|
||||||
|
f"Frequency {result:.3g} Hz is outside the supported range "
|
||||||
|
f"({_FREQ_MIN_HZ:.0f} Hz – {_FREQ_MAX_HZ:.3g} Hz): '{value}'"
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
raise ValueError(f"Cannot parse frequency: '{value}'")
|
raise ValueError(f"Cannot parse frequency: '{value}'")
|
||||||
|
|
||||||
|
|
@ -76,14 +104,22 @@ def parse_gain(value: str | float | int) -> float | str:
|
||||||
40 (numeric) → 40.0
|
40 (numeric) → 40.0
|
||||||
"""
|
"""
|
||||||
if isinstance(value, (int, float)):
|
if isinstance(value, (int, float)):
|
||||||
return float(value)
|
result = float(value)
|
||||||
|
if not (_GAIN_MIN_DB <= result <= _GAIN_MAX_DB):
|
||||||
|
raise ValueError(f"Gain {result} dB is outside the supported range ({_GAIN_MIN_DB} – {_GAIN_MAX_DB} dB)")
|
||||||
|
return result
|
||||||
value = str(value).strip()
|
value = str(value).strip()
|
||||||
if value.lower() == "auto":
|
if value.lower() == "auto":
|
||||||
return "auto"
|
return "auto"
|
||||||
match = re.fullmatch(r"([\d.+\-]+)\s*dB?", value, re.IGNORECASE)
|
match = re.fullmatch(r"([\d.+\-]+)\s*dB?", value, re.IGNORECASE)
|
||||||
if not match:
|
if not match:
|
||||||
raise ValueError(f"Cannot parse gain: '{value}'")
|
raise ValueError(f"Cannot parse gain: '{value}'")
|
||||||
return float(match.group(1))
|
result = float(match.group(1))
|
||||||
|
if not (_GAIN_MIN_DB <= result <= _GAIN_MAX_DB):
|
||||||
|
raise ValueError(
|
||||||
|
f"Gain {result} dB is outside the supported range ({_GAIN_MIN_DB} – {_GAIN_MAX_DB} dB): '{value}'"
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
def parse_bandwidth_mhz(value: str | float | int | None) -> Optional[float]:
|
def parse_bandwidth_mhz(value: str | float | int | None) -> Optional[float]:
|
||||||
|
|
@ -273,8 +309,12 @@ class CampaignConfig:
|
||||||
transmitters = [TransmitterConfig.from_dict(t) for t in raw.get("transmitters", [])]
|
transmitters = [TransmitterConfig.from_dict(t) for t in raw.get("transmitters", [])]
|
||||||
if not transmitters:
|
if not transmitters:
|
||||||
raise ValueError("Campaign config must define at least one transmitter")
|
raise ValueError("Campaign config must define at least one transmitter")
|
||||||
|
if "recorder" not in raw:
|
||||||
|
raise ValueError("Campaign config is missing required 'recorder' section")
|
||||||
|
raw_name = str(campaign_meta.get("name", "unnamed"))
|
||||||
|
safe_name = _SAFE_NAME_RE.sub("_", raw_name)
|
||||||
return cls(
|
return cls(
|
||||||
name=str(campaign_meta.get("name", "unnamed")),
|
name=safe_name,
|
||||||
mode=str(campaign_meta.get("mode", "controlled_testbed")),
|
mode=str(campaign_meta.get("mode", "controlled_testbed")),
|
||||||
recorder=RecorderConfig.from_dict(raw["recorder"]),
|
recorder=RecorderConfig.from_dict(raw["recorder"]),
|
||||||
transmitters=transmitters,
|
transmitters=transmitters,
|
||||||
|
|
@ -332,9 +372,13 @@ class CampaignConfig:
|
||||||
transmitters = [TransmitterConfig.from_dict(t) for t in raw.get("transmitters", [])]
|
transmitters = [TransmitterConfig.from_dict(t) for t in raw.get("transmitters", [])]
|
||||||
if not transmitters:
|
if not transmitters:
|
||||||
raise ValueError("Campaign config must define at least one transmitter")
|
raise ValueError("Campaign config must define at least one transmitter")
|
||||||
|
if "recorder" not in raw:
|
||||||
|
raise ValueError(f"Campaign config is missing required 'recorder' section in {path}")
|
||||||
|
raw_name = str(campaign_meta.get("name", path.stem))
|
||||||
|
safe_name = _SAFE_NAME_RE.sub("_", raw_name)
|
||||||
|
|
||||||
return cls(
|
return cls(
|
||||||
name=str(campaign_meta.get("name", path.stem)),
|
name=safe_name,
|
||||||
mode=str(campaign_meta.get("mode", "controlled_testbed")),
|
mode=str(campaign_meta.get("mode", "controlled_testbed")),
|
||||||
recorder=RecorderConfig.from_dict(raw["recorder"]),
|
recorder=RecorderConfig.from_dict(raw["recorder"]),
|
||||||
transmitters=transmitters,
|
transmitters=transmitters,
|
||||||
|
|
|
||||||
|
|
@ -32,6 +32,9 @@ _DEVICE_ALIASES = {
|
||||||
"rtlsdr": "rtlsdr",
|
"rtlsdr": "rtlsdr",
|
||||||
"rtl_sdr": "rtlsdr",
|
"rtl_sdr": "rtlsdr",
|
||||||
"thinkrf": "thinkrf",
|
"thinkrf": "thinkrf",
|
||||||
|
# Simulated device — no hardware required
|
||||||
|
"mock": "mock",
|
||||||
|
"sim": "mock",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -127,14 +130,22 @@ def _run_script(script: str, *args: str, timeout: float = 15.0) -> str:
|
||||||
A non-zero return code raises RuntimeError.
|
A non-zero return code raises RuntimeError.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
script: Path to executable script.
|
script: Path to executable script. Must be an absolute path to an
|
||||||
|
existing regular file. Relative paths are rejected to prevent
|
||||||
|
accidentally executing files that are not the intended script.
|
||||||
*args: Positional arguments forwarded to the script.
|
*args: Positional arguments forwarded to the script.
|
||||||
timeout: Maximum seconds to wait.
|
timeout: Maximum seconds to wait.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Script stdout as a string.
|
Script stdout as a string.
|
||||||
"""
|
"""
|
||||||
cmd = [script, *args]
|
script_path = Path(script).resolve()
|
||||||
|
if not script_path.is_absolute():
|
||||||
|
raise RuntimeError(f"Script path must be absolute: {script}")
|
||||||
|
if not script_path.is_file():
|
||||||
|
raise RuntimeError(f"Script not found or is not a regular file: {script}")
|
||||||
|
|
||||||
|
cmd = [str(script_path), *args]
|
||||||
logger.debug(f"Running script: {' '.join(cmd)}")
|
logger.debug(f"Running script: {' '.join(cmd)}")
|
||||||
try:
|
try:
|
||||||
result = subprocess.run(
|
result = subprocess.run(
|
||||||
|
|
@ -251,7 +262,7 @@ class CampaignExecutor:
|
||||||
|
|
||||||
def _init_sdr(self) -> None:
|
def _init_sdr(self) -> None:
|
||||||
"""Initialise and configure the SDR recorder."""
|
"""Initialise and configure the SDR recorder."""
|
||||||
from ria_toolkit_oss_cli.ria_toolkit_oss.common import get_sdr_device
|
from ria_toolkit_oss.sdr import get_sdr_device
|
||||||
|
|
||||||
rec = self.config.recorder
|
rec = self.config.recorder
|
||||||
device_name = _DEVICE_ALIASES.get(rec.device.lower(), rec.device.lower())
|
device_name = _DEVICE_ALIASES.get(rec.device.lower(), rec.device.lower())
|
||||||
|
|
@ -411,11 +422,21 @@ class CampaignExecutor:
|
||||||
"""Save a recording to disk and return the data file path."""
|
"""Save a recording to disk and return the data file path."""
|
||||||
out = self.config.output
|
out = self.config.output
|
||||||
rel_filename = build_output_filename(device_id, step)
|
rel_filename = build_output_filename(device_id, step)
|
||||||
out_dir = Path(out.path)
|
out_dir = Path(out.path).resolve()
|
||||||
|
|
||||||
# build_output_filename returns "<device_id>/<label>"
|
# build_output_filename returns "<device_id>/<label>"
|
||||||
# to_sigmf needs filename (base) and path (dir) separately
|
# to_sigmf needs filename (base) and path (dir) separately
|
||||||
parts = Path(rel_filename)
|
parts = Path(rel_filename)
|
||||||
subdir = out_dir / parts.parent
|
subdir = (out_dir / parts.parent).resolve()
|
||||||
|
|
||||||
|
# Prevent path traversal: the resolved subdir must stay within the configured output directory.
|
||||||
|
try:
|
||||||
|
subdir.relative_to(out_dir)
|
||||||
|
except ValueError:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"Output path escape detected: '{subdir}' is outside configured output directory '{out_dir}'"
|
||||||
|
)
|
||||||
|
|
||||||
subdir.mkdir(parents=True, exist_ok=True)
|
subdir.mkdir(parents=True, exist_ok=True)
|
||||||
base = parts.name
|
base = parts.name
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -4,6 +4,39 @@ It streamlines tasks involving signal reception and transmission, as well as com
|
||||||
operations such as detecting and configuring available devices.
|
operations such as detecting and configuring available devices.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__all__ = ["SDR", "SDRError", "SDRParameterError"]
|
__all__ = ["SDR", "SDRError", "SDRParameterError", "MockSDR", "get_sdr_device"]
|
||||||
|
|
||||||
|
from .mock import MockSDR
|
||||||
from .sdr import SDR, SDRError, SDRParameterError
|
from .sdr import SDR, SDRError, SDRParameterError
|
||||||
|
|
||||||
|
|
||||||
|
def get_sdr_device(device_type: str, ident: str | None = None, tx: bool = False) -> SDR:
|
||||||
|
"""Return an SDR instance for *device_type*.
|
||||||
|
|
||||||
|
For ``"mock"`` / ``"sim"`` device types, returns a :class:`MockSDR`
|
||||||
|
immediately (no hardware required). For all real device types, delegates
|
||||||
|
to ``ria_toolkit_oss_cli.ria_toolkit_oss.common.get_sdr_device`` if the
|
||||||
|
CLI package is installed; otherwise raises ``ImportError`` with a helpful
|
||||||
|
message.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
device_type: Device name (``"mock"``, ``"pluto"``, ``"usrp"``, …).
|
||||||
|
ident: Optional device identifier (IP address, serial number, …).
|
||||||
|
tx: If True, require TX capability.
|
||||||
|
"""
|
||||||
|
if device_type in ("mock", "sim"):
|
||||||
|
return MockSDR()
|
||||||
|
|
||||||
|
# Delegate real device types to the CLI package which holds the driver
|
||||||
|
# imports behind hardware-specific optional dependencies.
|
||||||
|
try:
|
||||||
|
from ria_toolkit_oss_cli.ria_toolkit_oss.common import (
|
||||||
|
get_sdr_device as _cli_get,
|
||||||
|
)
|
||||||
|
except ImportError as exc:
|
||||||
|
raise ImportError(
|
||||||
|
f"ria_toolkit_oss_cli is required to use hardware SDR device '{device_type}'. "
|
||||||
|
"Install it with: pip install ria-toolkit-oss-cli"
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
return _cli_get(device_type, ident=ident, tx=tx)
|
||||||
|
|
|
||||||
131
src/ria_toolkit_oss/sdr/mock.py
Normal file
131
src/ria_toolkit_oss/sdr/mock.py
Normal file
|
|
@ -0,0 +1,131 @@
|
||||||
|
"""Simulated SDR device for testing without hardware.
|
||||||
|
|
||||||
|
Set ``recorder.device = "mock"`` (or ``"sim"``) in a campaign config to use
|
||||||
|
this driver. The inference loop can also use it by specifying ``device:
|
||||||
|
"mock"`` in the SDR start request.
|
||||||
|
|
||||||
|
The mock generates complex float32 AWGN samples normalised to [-1, 1].
|
||||||
|
It satisfies both interfaces used in this codebase:
|
||||||
|
|
||||||
|
- ``record(num_samples)`` / ``_stream_rx(callback)`` — used by
|
||||||
|
``CampaignExecutor`` (inherits from ``SDR`` base class).
|
||||||
|
- ``rx(num_samples)`` — PlutoSDR-style interface used by the controller
|
||||||
|
inference loop.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import time
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
from ria_toolkit_oss.sdr.sdr import SDR
|
||||||
|
|
||||||
|
_DEFAULT_BUFFER_SIZE = 4096
|
||||||
|
# Simulated sample rate throttle: sleep this long between buffers so the
|
||||||
|
# loop does not spin at 100% CPU. 10 ms ≈ 100 buffers/s which is fine for
|
||||||
|
# tests and campaign execution timing.
|
||||||
|
_SLEEP_PER_BUFFER_S = 0.01
|
||||||
|
|
||||||
|
|
||||||
|
class MockSDR(SDR):
|
||||||
|
"""Software-simulated SDR that generates AWGN noise.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
buffer_size: Number of complex samples per streaming buffer.
|
||||||
|
seed: Optional RNG seed for reproducible output.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, buffer_size: int = _DEFAULT_BUFFER_SIZE, seed: int | None = None):
|
||||||
|
super().__init__()
|
||||||
|
self.rx_buffer_size: int = buffer_size
|
||||||
|
self._rng = np.random.default_rng(seed)
|
||||||
|
|
||||||
|
# Direct attribute aliases used by _apply_sdr_config in the controller.
|
||||||
|
self.center_freq: float = 2.45e9
|
||||||
|
self.sample_rate: float = 10e6
|
||||||
|
self.gain: float = 40.0
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Abstract method implementations
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def init_rx(
|
||||||
|
self,
|
||||||
|
sample_rate: float,
|
||||||
|
center_frequency: float,
|
||||||
|
gain,
|
||||||
|
channel: int = 0,
|
||||||
|
gain_mode: str = "manual",
|
||||||
|
) -> None:
|
||||||
|
self.rx_sample_rate = float(sample_rate)
|
||||||
|
self.rx_center_frequency = float(center_frequency)
|
||||||
|
self.rx_gain = 40.0 if gain is None else float(gain)
|
||||||
|
# Mirror to the attribute names used by _apply_sdr_config.
|
||||||
|
self.sample_rate = self.rx_sample_rate
|
||||||
|
self.center_freq = self.rx_center_frequency
|
||||||
|
self.gain = self.rx_gain
|
||||||
|
self._rx_initialized = True
|
||||||
|
|
||||||
|
def init_tx(
|
||||||
|
self,
|
||||||
|
sample_rate: float,
|
||||||
|
center_frequency: float,
|
||||||
|
gain,
|
||||||
|
channel: int = 0,
|
||||||
|
gain_mode: str = "manual",
|
||||||
|
) -> None:
|
||||||
|
self.tx_sample_rate = float(sample_rate)
|
||||||
|
self.tx_center_frequency = float(center_frequency)
|
||||||
|
self.tx_gain = 40.0 if gain is None else float(gain)
|
||||||
|
self._tx_initialized = True
|
||||||
|
|
||||||
|
def _stream_rx(self, callback) -> None:
|
||||||
|
"""Generate 1-D AWGN buffers and pass each to *callback* until stopped.
|
||||||
|
|
||||||
|
Uses 1-D arrays so the base class ``_validate_buffer`` check does not
|
||||||
|
incorrectly flag them as corrupted (the (1, N) form triggers a false
|
||||||
|
positive in the all-same-value check).
|
||||||
|
"""
|
||||||
|
self._enable_rx = True
|
||||||
|
while self._enable_rx:
|
||||||
|
buf = self._awgn(self.rx_buffer_size)
|
||||||
|
callback(buf)
|
||||||
|
time.sleep(_SLEEP_PER_BUFFER_S)
|
||||||
|
|
||||||
|
def _stream_tx(self, callback) -> None:
|
||||||
|
self._enable_tx = True
|
||||||
|
while self._enable_tx:
|
||||||
|
callback(self.rx_buffer_size)
|
||||||
|
time.sleep(_SLEEP_PER_BUFFER_S)
|
||||||
|
|
||||||
|
def set_clock_source(self, source: str) -> None:
|
||||||
|
pass # no-op
|
||||||
|
|
||||||
|
def close(self) -> None:
|
||||||
|
self._enable_rx = False
|
||||||
|
self._enable_tx = False
|
||||||
|
self._rx_initialized = False
|
||||||
|
self._tx_initialized = False
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# PlutoSDR-style interface used by the controller inference loop
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def rx(self, num_samples: int) -> np.ndarray:
|
||||||
|
"""Return *num_samples* complex64 AWGN samples (PlutoSDR-style)."""
|
||||||
|
return self._awgn(num_samples)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Internal helpers
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _awgn(self, n: int) -> np.ndarray:
|
||||||
|
"""Return *n* normalised complex64 AWGN samples as a 1-D array."""
|
||||||
|
real = self._rng.standard_normal(n).astype(np.float32)
|
||||||
|
imag = self._rng.standard_normal(n).astype(np.float32)
|
||||||
|
buf = real + 1j * imag
|
||||||
|
peak = np.abs(buf).max()
|
||||||
|
if peak > 1e-9:
|
||||||
|
buf /= peak
|
||||||
|
return buf
|
||||||
|
|
@ -32,7 +32,6 @@ class SDR(ABC):
|
||||||
self._accumulated_buffer = None
|
self._accumulated_buffer = None
|
||||||
self._max_num_buffers = None
|
self._max_num_buffers = None
|
||||||
self._num_buffers_processed = 0
|
self._num_buffers_processed = 0
|
||||||
self._accumulated_buffer = None
|
|
||||||
self._last_buffer = None
|
self._last_buffer = None
|
||||||
self._corrupted_buffer_count = 0
|
self._corrupted_buffer_count = 0
|
||||||
|
|
||||||
|
|
@ -282,7 +281,7 @@ class SDR(ABC):
|
||||||
elif num_samples is not None:
|
elif num_samples is not None:
|
||||||
self._num_samples_to_transmit = num_samples
|
self._num_samples_to_transmit = num_samples
|
||||||
elif tx_time is not None:
|
elif tx_time is not None:
|
||||||
self._num_samples_to_transmit = tx_time * self.tx_sample_rate
|
self._num_samples_to_transmit = int(tx_time * self.tx_sample_rate)
|
||||||
else:
|
else:
|
||||||
self._num_samples_to_transmit = len(recording)
|
self._num_samples_to_transmit = len(recording)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,13 @@
|
||||||
"""API key authentication dependency."""
|
"""API key authentication dependency."""
|
||||||
|
|
||||||
|
import hmac
|
||||||
|
import logging
|
||||||
|
|
||||||
from fastapi import Depends, HTTPException, Request, status
|
from fastapi import Depends, HTTPException, Request, status
|
||||||
from fastapi.security import APIKeyHeader
|
from fastapi.security import APIKeyHeader
|
||||||
|
|
||||||
_api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)
|
_api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
async def require_api_key(
|
async def require_api_key(
|
||||||
|
|
@ -18,7 +22,14 @@ async def require_api_key(
|
||||||
expected: str = request.app.state.api_key
|
expected: str = request.app.state.api_key
|
||||||
if not expected:
|
if not expected:
|
||||||
return # dev mode: no key set, allow all
|
return # dev mode: no key set, allow all
|
||||||
if api_key != expected:
|
if not hmac.compare_digest(api_key or "", expected):
|
||||||
|
client = getattr(request.client, "host", "unknown")
|
||||||
|
logger.warning(
|
||||||
|
"Authentication failure from %s — %s %s",
|
||||||
|
client,
|
||||||
|
request.method,
|
||||||
|
request.url.path,
|
||||||
|
)
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_403_FORBIDDEN,
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
detail="Invalid or missing API key",
|
detail="Invalid or missing API key",
|
||||||
|
|
|
||||||
|
|
@ -21,8 +21,7 @@ def serve() -> None:
|
||||||
import uvicorn
|
import uvicorn
|
||||||
except ImportError:
|
except ImportError:
|
||||||
raise SystemExit(
|
raise SystemExit(
|
||||||
"uvicorn is required to run the RT-OSS server.\n"
|
"uvicorn is required to run the RT-OSS server.\n" "Install it with: pip install 'ria-toolkit-oss[server]'"
|
||||||
"Install it with: pip install 'ria-toolkit-oss[server]'"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
from .app import create_app
|
from .app import create_app
|
||||||
|
|
@ -34,6 +33,15 @@ def serve() -> None:
|
||||||
app = create_app(api_key=api_key)
|
app = create_app(api_key=api_key)
|
||||||
|
|
||||||
if not api_key:
|
if not api_key:
|
||||||
print("WARNING: RT_OSS_API_KEY not set — running in unauthenticated dev mode.")
|
print(
|
||||||
|
"\n"
|
||||||
|
"╔══════════════════════════════════════════════════════════════╗\n"
|
||||||
|
"║ WARNING: RT_OSS_API_KEY is not set. ║\n"
|
||||||
|
"║ The server is running with NO authentication. ║\n"
|
||||||
|
"║ Anyone who can reach this port has full API access. ║\n"
|
||||||
|
"║ Set RT_OSS_API_KEY=<secret> before exposing to a network. ║\n"
|
||||||
|
"╚══════════════════════════════════════════════════════════════╝\n",
|
||||||
|
flush=True,
|
||||||
|
)
|
||||||
|
|
||||||
uvicorn.run(app, host=host, port=port)
|
uvicorn.run(app, host=host, port=port)
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,9 @@
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from pydantic import BaseModel
|
from pathlib import Path
|
||||||
|
|
||||||
|
from pydantic import BaseModel, field_validator
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Orchestrator
|
# Orchestrator
|
||||||
|
|
@ -50,6 +52,21 @@ class LoadModelRequest(BaseModel):
|
||||||
model_path: str
|
model_path: str
|
||||||
label_map: dict[str, int] # class_name -> class_index
|
label_map: dict[str, int] # class_name -> class_index
|
||||||
|
|
||||||
|
@field_validator("model_path")
|
||||||
|
@classmethod
|
||||||
|
def validate_model_path(cls, v: str) -> str:
|
||||||
|
p = Path(v)
|
||||||
|
if ".." in p.parts:
|
||||||
|
raise ValueError("model_path must not contain path traversal components")
|
||||||
|
if p.suffix.lower() != ".onnx":
|
||||||
|
raise ValueError("model_path must point to an .onnx file")
|
||||||
|
# Resolve to catch symlink-based traversal; return the resolved absolute path
|
||||||
|
# so callers always work with the real filesystem location.
|
||||||
|
resolved = p.resolve()
|
||||||
|
if resolved.suffix.lower() != ".onnx":
|
||||||
|
raise ValueError("Resolved model_path must point to an .onnx file")
|
||||||
|
return str(resolved)
|
||||||
|
|
||||||
|
|
||||||
class LoadModelResponse(BaseModel):
|
class LoadModelResponse(BaseModel):
|
||||||
loaded: bool
|
loaded: bool
|
||||||
|
|
|
||||||
|
|
@ -5,6 +5,7 @@ from __future__ import annotations
|
||||||
import logging
|
import logging
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from fastapi import APIRouter, HTTPException, status
|
from fastapi import APIRouter, HTTPException, status
|
||||||
|
|
@ -39,8 +40,14 @@ def _load_onnx_session(model_path: str):
|
||||||
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
|
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
|
||||||
detail="onnxruntime not installed. Install with: pip install ria-toolkit-oss[server]",
|
detail="onnxruntime not installed. Install with: pip install ria-toolkit-oss[server]",
|
||||||
)
|
)
|
||||||
|
resolved = Path(model_path).resolve()
|
||||||
|
if not resolved.is_file():
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||||
|
detail=f"Model file not found: {model_path}",
|
||||||
|
)
|
||||||
try:
|
try:
|
||||||
return ort.InferenceSession(model_path, providers=["CPUExecutionProvider"])
|
return ort.InferenceSession(str(resolved), providers=["CPUExecutionProvider"])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=f"Failed to load ONNX model: {e}")
|
raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=f"Failed to load ONNX model: {e}")
|
||||||
|
|
||||||
|
|
@ -122,7 +129,8 @@ def _inference_loop(state: InferenceState, sdr) -> None:
|
||||||
probs = softmax(logits)
|
probs = softmax(logits)
|
||||||
pred_idx = int(np.argmax(probs))
|
pred_idx = int(np.argmax(probs))
|
||||||
prediction = state.index_to_label.get(pred_idx, str(pred_idx))
|
prediction = state.index_to_label.get(pred_idx, str(pred_idx))
|
||||||
except Exception:
|
except Exception as exc:
|
||||||
|
logger.warning("Inference prediction failed: %s", exc)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
is_idle = prediction in _IDLE_LABELS
|
is_idle = prediction in _IDLE_LABELS
|
||||||
|
|
@ -142,7 +150,7 @@ def _inference_loop(state: InferenceState, sdr) -> None:
|
||||||
sdr.close()
|
sdr.close()
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
state.running = False
|
state.set_running(False)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/load", response_model=LoadModelResponse)
|
@router.post("/load", response_model=LoadModelResponse)
|
||||||
|
|
@ -153,7 +161,7 @@ async def load_model(request: LoadModelRequest):
|
||||||
``enrolled_devices`` enriches status responses with human names and authorization flags.
|
``enrolled_devices`` enriches status responses with human names and authorization flags.
|
||||||
"""
|
"""
|
||||||
existing = get_inference()
|
existing = get_inference()
|
||||||
if existing and existing.running:
|
if existing and existing.get_running():
|
||||||
_stop_current_inference(existing)
|
_stop_current_inference(existing)
|
||||||
|
|
||||||
session = _load_onnx_session(request.model_path)
|
session = _load_onnx_session(request.model_path)
|
||||||
|
|
@ -176,7 +184,7 @@ async def start_inference(request: StartInferenceRequest):
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_409_CONFLICT, detail="No model loaded. Call POST /inference/load first."
|
status_code=status.HTTP_409_CONFLICT, detail="No model loaded. Call POST /inference/load first."
|
||||||
)
|
)
|
||||||
if state.running:
|
if state.get_running():
|
||||||
raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="Inference is already running.")
|
raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="Inference is already running.")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|
@ -199,7 +207,7 @@ async def start_inference(request: StartInferenceRequest):
|
||||||
raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=f"SDR initialisation failed: {e}")
|
raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=f"SDR initialisation failed: {e}")
|
||||||
|
|
||||||
state.stop_event.clear()
|
state.stop_event.clear()
|
||||||
state.running = True
|
state.set_running(True)
|
||||||
state.thread = threading.Thread(target=_inference_loop, args=(state, sdr), daemon=True)
|
state.thread = threading.Thread(target=_inference_loop, args=(state, sdr), daemon=True)
|
||||||
state.thread.start()
|
state.thread.start()
|
||||||
return StartInferenceResponse(running=True)
|
return StartInferenceResponse(running=True)
|
||||||
|
|
@ -209,7 +217,7 @@ async def start_inference(request: StartInferenceRequest):
|
||||||
async def stop_inference():
|
async def stop_inference():
|
||||||
"""Stop the running inference loop."""
|
"""Stop the running inference loop."""
|
||||||
state = get_inference()
|
state = get_inference()
|
||||||
if not state or not state.running:
|
if not state or not state.get_running():
|
||||||
return StopInferenceResponse(stopped=False)
|
return StopInferenceResponse(stopped=False)
|
||||||
_stop_current_inference(state)
|
_stop_current_inference(state)
|
||||||
return StopInferenceResponse(stopped=True)
|
return StopInferenceResponse(stopped=True)
|
||||||
|
|
|
||||||
|
|
@ -58,12 +58,22 @@ def _run_campaign_thread(campaign_id: str, cfg: CampaignConfig) -> None:
|
||||||
async def deploy(request: DeployRequest):
|
async def deploy(request: DeployRequest):
|
||||||
"""Deploy a campaign config and start execution. Returns a ``campaign_id`` for polling.
|
"""Deploy a campaign config and start execution. Returns a ``campaign_id`` for polling.
|
||||||
Cancellation takes effect at step boundaries, not mid-capture.
|
Cancellation takes effect at step boundaries, not mid-capture.
|
||||||
|
|
||||||
|
External scripts are not permitted in server-deployed campaigns. Configure
|
||||||
|
transmitters without the ``script`` field, or run campaigns via the CLI.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
cfg = CampaignConfig.from_dict(request.config)
|
cfg = CampaignConfig.from_dict(request.config)
|
||||||
except (ValueError, KeyError) as e:
|
except (ValueError, KeyError) as e:
|
||||||
raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(e))
|
raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(e))
|
||||||
|
|
||||||
|
if any(t.script for t in cfg.transmitters):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||||
|
detail="External scripts are not permitted in server-deployed campaigns. "
|
||||||
|
"Remove the 'script' field from all transmitters, or run the campaign via the CLI.",
|
||||||
|
)
|
||||||
|
|
||||||
campaign_id = str(uuid.uuid4())
|
campaign_id = str(uuid.uuid4())
|
||||||
cancel_event = threading.Event()
|
cancel_event = threading.Event()
|
||||||
thread = threading.Thread(target=_run_campaign_thread, args=(campaign_id, cfg), daemon=True)
|
thread = threading.Thread(target=_run_campaign_thread, args=(campaign_id, cfg), daemon=True)
|
||||||
|
|
|
||||||
|
|
@ -72,6 +72,14 @@ class InferenceState:
|
||||||
self._pending_sdr_config = None
|
self._pending_sdr_config = None
|
||||||
return cfg
|
return cfg
|
||||||
|
|
||||||
|
def set_running(self, value: bool) -> None:
|
||||||
|
with self._lock:
|
||||||
|
self.running = value
|
||||||
|
|
||||||
|
def get_running(self) -> bool:
|
||||||
|
with self._lock:
|
||||||
|
return self.running
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Module-level stores
|
# Module-level stores
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from utils.signal.block_generator.block import Block
|
|
||||||
from utils.signal.block_generator.data_types import DataType
|
from ria_toolkit_oss.signal.block_generator.block import Block
|
||||||
|
from ria_toolkit_oss.signal.block_generator.data_types import DataType
|
||||||
|
|
||||||
|
|
||||||
class FrequencyUpConversion(Block):
|
class FrequencyUpConversion(Block):
|
||||||
|
|
|
||||||
|
|
@ -5,6 +5,7 @@ and return a corresponding numpy.ndarray with the impairment model applied;
|
||||||
we call the latter the impaired data.
|
we call the latter the impaired data.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import warnings
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
|
@ -58,13 +59,14 @@ def generate_awgn(signal: ArrayLike | Recording, snr: Optional[float] = 1) -> np
|
||||||
|
|
||||||
# Calculate the RMS power of the signal to solve for the RMS power of the noise
|
# Calculate the RMS power of the signal to solve for the RMS power of the noise
|
||||||
signal_rms_power = np.sqrt(np.mean(np.abs(data) ** 2))
|
signal_rms_power = np.sqrt(np.mean(np.abs(data) ** 2))
|
||||||
noise_rms_power = signal_rms_power / snr_linear
|
noise_rms_power = signal_rms_power / np.sqrt(snr_linear)
|
||||||
|
|
||||||
# Generate the AWGN noise which has the same shape as data
|
# Generate complex AWGN: independent Gaussian I and Q components.
|
||||||
variance = noise_rms_power**2
|
# Each component has std = noise_rms_power / sqrt(2) so total power = noise_rms_power^2.
|
||||||
magnitude = np.random.normal(loc=0, scale=np.sqrt(variance), size=(c, n))
|
component_std = noise_rms_power / np.sqrt(2)
|
||||||
phase = np.random.uniform(low=0, high=2 * np.pi, size=(c, n))
|
complex_awgn = np.random.normal(scale=component_std, size=(c, n)) + 1j * np.random.normal(
|
||||||
complex_awgn = magnitude * np.exp(1j * phase)
|
scale=component_std, size=(c, n)
|
||||||
|
)
|
||||||
|
|
||||||
if isinstance(signal, Recording):
|
if isinstance(signal, Recording):
|
||||||
return Recording(data=complex_awgn, metadata=signal.metadata)
|
return Recording(data=complex_awgn, metadata=signal.metadata)
|
||||||
|
|
@ -378,7 +380,8 @@ def quantize_tape(
|
||||||
raise ValueError("signal must be CxN complex.")
|
raise ValueError("signal must be CxN complex.")
|
||||||
|
|
||||||
if rounding_type not in {"ceiling", "floor"}:
|
if rounding_type not in {"ceiling", "floor"}:
|
||||||
raise UserWarning('rounding_type must be either "floor" or "ceiling", floor has been selected by default')
|
warnings.warn('rounding_type must be either "floor" or "ceiling", floor has been selected by default')
|
||||||
|
rounding_type = "floor"
|
||||||
|
|
||||||
if c == 1:
|
if c == 1:
|
||||||
iq_data = convert_to_2xn(data)
|
iq_data = convert_to_2xn(data)
|
||||||
|
|
@ -455,7 +458,8 @@ def quantize_parts(
|
||||||
raise ValueError("signal must be CxN complex.")
|
raise ValueError("signal must be CxN complex.")
|
||||||
|
|
||||||
if rounding_type not in {"ceiling", "floor"}:
|
if rounding_type not in {"ceiling", "floor"}:
|
||||||
raise UserWarning('rounding_type must be either "floor" or "ceiling", floor has been selected by default')
|
warnings.warn('rounding_type must be either "floor" or "ceiling", floor has been selected by default')
|
||||||
|
rounding_type = "floor"
|
||||||
|
|
||||||
if c == 1:
|
if c == 1:
|
||||||
iq_data = convert_to_2xn(data)
|
iq_data = convert_to_2xn(data)
|
||||||
|
|
@ -610,10 +614,11 @@ def cut_out( # noqa: C901 # TODO: Simplify function
|
||||||
raise ValueError("signal must be CxN complex.")
|
raise ValueError("signal must be CxN complex.")
|
||||||
|
|
||||||
if fill_type not in {"zeros", "ones", "low-snr", "avg-snr", "high-snr"}:
|
if fill_type not in {"zeros", "ones", "low-snr", "avg-snr", "high-snr"}:
|
||||||
raise UserWarning(
|
warnings.warn(
|
||||||
"""fill_type must be "zeros", "ones", "low-snr", "avg-snr", or "high-snr",
|
'fill_type must be "zeros", "ones", "low-snr", "avg-snr", or "high-snr", '
|
||||||
"ones" has been selected by default"""
|
'"ones" has been selected by default'
|
||||||
)
|
)
|
||||||
|
fill_type = "ones"
|
||||||
|
|
||||||
if max_section_size < 1 or max_section_size >= n:
|
if max_section_size < 1 or max_section_size >= n:
|
||||||
raise ValueError("max_section_size must be at least 1 and must be less than the length of signal.")
|
raise ValueError("max_section_size must be at least 1 and must be less than the length of signal.")
|
||||||
|
|
|
||||||
|
|
@ -9,6 +9,7 @@ not the same as the signal at the end of the medium. What is sent is not what is
|
||||||
Three causes of impairment are attenuation, distortion, and noise.
|
Three causes of impairment are attenuation, distortion, and noise.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import warnings
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
|
@ -55,8 +56,6 @@ def add_awgn_to_signal(signal: ArrayLike | Recording, snr: Optional[float] = 1)
|
||||||
raise ValueError("signal must be CxN complex.")
|
raise ValueError("signal must be CxN complex.")
|
||||||
|
|
||||||
noise = iq_augmentations.generate_awgn(signal=data, snr=snr)
|
noise = iq_augmentations.generate_awgn(signal=data, snr=snr)
|
||||||
print(f"noise is {noise}")
|
|
||||||
|
|
||||||
noisy_signal = data + noise
|
noisy_signal = data + noise
|
||||||
|
|
||||||
if isinstance(signal, Recording):
|
if isinstance(signal, Recording):
|
||||||
|
|
@ -101,16 +100,18 @@ def time_shift(signal: ArrayLike | Recording, shift: Optional[int] = 1) -> np.nd
|
||||||
raise ValueError("signal must be CxN complex.")
|
raise ValueError("signal must be CxN complex.")
|
||||||
|
|
||||||
if shift > n:
|
if shift > n:
|
||||||
raise UserWarning("shift is greater than signal length")
|
warnings.warn("shift is greater than signal length")
|
||||||
|
|
||||||
shifted_data = np.zeros_like(data)
|
shifted_data = np.zeros_like(data)
|
||||||
|
|
||||||
if c == 1:
|
if c == 1:
|
||||||
# New iq array shifted left or right depending on sign of shift
|
# New iq array shifted left or right depending on sign of shift
|
||||||
# This should work even if shift > iqdata.shape[1]
|
# This should work even if shift > iqdata.shape[1]
|
||||||
if shift >= 0:
|
if shift > 0:
|
||||||
# Shift to right
|
# Shift to right
|
||||||
shifted_data[:, shift:] = data[:, :-shift]
|
shifted_data[:, shift:] = data[:, :-shift]
|
||||||
|
elif shift == 0:
|
||||||
|
shifted_data[:] = data
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# Shift to the left
|
# Shift to the left
|
||||||
|
|
@ -354,8 +355,9 @@ def resample(signal: ArrayLike | Recording, up: Optional[int] = 4, down: Optiona
|
||||||
resampled_iqdata = resampled_iqdata[:, :n]
|
resampled_iqdata = resampled_iqdata[:, :n]
|
||||||
|
|
||||||
else:
|
else:
|
||||||
empty_array = np.zeros(resampled_iqdata.shape, dtype=resampled_iqdata.dtype)
|
empty_array = np.zeros((1, n), dtype=resampled_iqdata.dtype)
|
||||||
empty_array[:, : resampled_iqdata.shape[1]] = resampled_iqdata
|
empty_array[:, : resampled_iqdata.shape[1]] = resampled_iqdata
|
||||||
|
resampled_iqdata = empty_array
|
||||||
else:
|
else:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -332,7 +332,7 @@ def parse_ident(ident: Optional[str]) -> tuple[Optional[str], Optional[str]]:
|
||||||
return ident, None
|
return ident, None
|
||||||
|
|
||||||
|
|
||||||
def get_sdr_device(device_type: str, ident: Optional[str] = None, tx=False):
|
def get_sdr_device(device_type: str, ident: Optional[str] = None, tx=False): # noqa: C901
|
||||||
"""
|
"""
|
||||||
Get TX-capable SDR device instance.
|
Get TX-capable SDR device instance.
|
||||||
|
|
||||||
|
|
@ -346,6 +346,11 @@ def get_sdr_device(device_type: str, ident: Optional[str] = None, tx=False):
|
||||||
Raises:
|
Raises:
|
||||||
click.ClickException: If device cannot be initialized or doesn't support TX
|
click.ClickException: If device cannot be initialized or doesn't support TX
|
||||||
"""
|
"""
|
||||||
|
if device_type in ("mock", "sim"):
|
||||||
|
from ria_toolkit_oss.sdr.mock import MockSDR
|
||||||
|
|
||||||
|
return MockSDR()
|
||||||
|
|
||||||
TX_CAPABLE_DEVICES = ["pluto", "hackrf", "bladerf", "usrp"]
|
TX_CAPABLE_DEVICES = ["pluto", "hackrf", "bladerf", "usrp"]
|
||||||
if tx and device_type not in TX_CAPABLE_DEVICES:
|
if tx and device_type not in TX_CAPABLE_DEVICES:
|
||||||
raise click.ClickException(
|
raise click.ClickException(
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
"""Device discovery utilities for SDR devices."""
|
"""Device discovery utilities for SDR devices."""
|
||||||
|
|
||||||
|
import importlib
|
||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
@ -42,15 +43,28 @@ def load_sdr_drivers(verbose: bool = False) -> Tuple[List[str], List[str], Dict[
|
||||||
for driver_name, module_path in drivers.items():
|
for driver_name, module_path in drivers.items():
|
||||||
try:
|
try:
|
||||||
# Attempt to import the driver module
|
# Attempt to import the driver module
|
||||||
if not verbose:
|
|
||||||
# Suppress output for quiet loading
|
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
|
if not verbose:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
warnings.simplefilter("ignore")
|
warnings.simplefilter("ignore")
|
||||||
__import__(module_path)
|
mod = importlib.import_module(module_path)
|
||||||
else:
|
else:
|
||||||
__import__(module_path)
|
mod = importlib.import_module(module_path)
|
||||||
|
|
||||||
|
# Verify the loaded module is from the expected package to guard against
|
||||||
|
# dependency-confusion / sys.path injection attacks.
|
||||||
|
mod_file = getattr(mod, "__file__", None) or ""
|
||||||
|
expected_pkg = module_path.split(".")[0] # e.g. "ria_toolkit_oss"
|
||||||
|
pkg_root = importlib.import_module(expected_pkg).__file__ or ""
|
||||||
|
import os as _os
|
||||||
|
|
||||||
|
pkg_dir = _os.path.dirname(_os.path.dirname(pkg_root))
|
||||||
|
if mod_file and not _os.path.realpath(mod_file).startswith(_os.path.realpath(pkg_dir)):
|
||||||
|
warnings.warn(
|
||||||
|
f"SDR driver '{driver_name}' loaded from unexpected location: {mod_file}",
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
|
|
||||||
_loaded_drivers.append(driver_name)
|
_loaded_drivers.append(driver_name)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -204,9 +204,15 @@ def load_custom_transforms(transform_dir):
|
||||||
if not py_files:
|
if not py_files:
|
||||||
raise click.ClickException(f"No .py files found in {transform_dir}")
|
raise click.ClickException(f"No .py files found in {transform_dir}")
|
||||||
|
|
||||||
|
click.echo(
|
||||||
|
f"WARNING: Loading custom transforms from '{transform_dir}'. "
|
||||||
|
"Each .py file will be executed as Python code — only use directories you trust.",
|
||||||
|
err=True,
|
||||||
|
)
|
||||||
|
|
||||||
for py_file in py_files:
|
for py_file in py_files:
|
||||||
try:
|
try:
|
||||||
# Load module dynamically
|
# Load module dynamically — executes the file as Python code.
|
||||||
spec = importlib.util.spec_from_file_location(py_file.stem, py_file)
|
spec = importlib.util.spec_from_file_location(py_file.stem, py_file)
|
||||||
if spec is None or spec.loader is None:
|
if spec is None or spec.loader is None:
|
||||||
click.echo(f"Warning: Could not load {py_file.name}")
|
click.echo(f"Warning: Could not load {py_file.name}")
|
||||||
|
|
|
||||||
|
|
@ -67,3 +67,135 @@ def test_annotation_area():
|
||||||
annotation_area = sample_annotation.area()
|
annotation_area = sample_annotation.area()
|
||||||
|
|
||||||
assert annotation_area == 600000
|
assert annotation_area == 600000
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Additional coverage
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_area_zero_sample_count():
|
||||||
|
# An annotation with sample_count=0 has area 0 even with a wide frequency band.
|
||||||
|
ann = Annotation(0, 0, 0.0, 1000.0)
|
||||||
|
assert ann.area() == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_area_zero_bandwidth():
|
||||||
|
# An annotation with equal freq edges has area 0 (degenerate band).
|
||||||
|
ann = Annotation(0, 100, 500.0, 500.0)
|
||||||
|
assert ann.area() == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_overlap_no_overlap_disjoint_time():
|
||||||
|
# Annotations that are completely separate in time have zero overlap.
|
||||||
|
ann1 = Annotation(sample_start=0, sample_count=5, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
ann2 = Annotation(sample_start=10, sample_count=5, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
assert ann1.overlap(ann2) == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_overlap_no_overlap_disjoint_frequency():
|
||||||
|
# Annotations that are completely separate in frequency have zero overlap.
|
||||||
|
ann1 = Annotation(sample_start=0, sample_count=10, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
ann2 = Annotation(sample_start=0, sample_count=10, freq_lower_edge=200, freq_upper_edge=300)
|
||||||
|
assert ann1.overlap(ann2) == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_overlap_touching_only_time():
|
||||||
|
# Annotations that share only a single sample boundary do NOT overlap.
|
||||||
|
# ann1 covers [0, 5), ann2 covers [5, 10) — they touch but don't overlap.
|
||||||
|
ann1 = Annotation(sample_start=0, sample_count=5, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
ann2 = Annotation(sample_start=5, sample_count=5, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
assert ann1.overlap(ann2) == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_overlap_touching_only_frequency():
|
||||||
|
# Annotations that share only a single frequency edge do NOT overlap.
|
||||||
|
ann1 = Annotation(sample_start=0, sample_count=10, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
ann2 = Annotation(sample_start=0, sample_count=10, freq_lower_edge=100, freq_upper_edge=200)
|
||||||
|
assert ann1.overlap(ann2) == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_overlap_with_self():
|
||||||
|
# An annotation's overlap with itself equals its own area.
|
||||||
|
ann = Annotation(sample_start=0, sample_count=10, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
assert ann.overlap(ann) == ann.area()
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_overlap_symmetry():
|
||||||
|
# overlap(a, b) == overlap(b, a)
|
||||||
|
ann1 = Annotation(sample_start=0, sample_count=10, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
ann2 = Annotation(sample_start=5, sample_count=10, freq_lower_edge=50, freq_upper_edge=150)
|
||||||
|
assert ann1.overlap(ann2) == ann2.overlap(ann1)
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_overlap_partial_known_value():
|
||||||
|
# ann1: samples [0,10), freq [0,100) → area = 10*100 = 1000
|
||||||
|
# ann2: samples [5,15), freq [50,150) → area = 10*100 = 1000
|
||||||
|
# overlap in samples: [5,10) = 5; in freq: [50,100) = 50 → overlap = 250
|
||||||
|
ann1 = Annotation(sample_start=0, sample_count=10, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
ann2 = Annotation(sample_start=5, sample_count=10, freq_lower_edge=50, freq_upper_edge=150)
|
||||||
|
assert ann1.overlap(ann2) == 5 * 50
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_detail_default_is_empty_dict():
|
||||||
|
ann = Annotation(0, 10, 0.0, 100.0)
|
||||||
|
assert ann.detail == {}
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_detail_accepts_valid_dict():
|
||||||
|
ann = Annotation(0, 10, 0.0, 100.0, detail={"snr": 10.5, "modulation": "BPSK"})
|
||||||
|
assert ann.detail == {"snr": 10.5, "modulation": "BPSK"}
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_detail_rejects_non_serializable():
|
||||||
|
# A dict containing a non-JSON-serializable value should raise ValueError.
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Annotation(0, 10, 0.0, 100.0, detail={"obj": object()})
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_to_sigmf_format_keys():
|
||||||
|
# to_sigmf_format() should include the SigMF standard keys.
|
||||||
|
ann = Annotation(
|
||||||
|
sample_start=100,
|
||||||
|
sample_count=200,
|
||||||
|
freq_lower_edge=1000.0,
|
||||||
|
freq_upper_edge=2000.0,
|
||||||
|
label="WiFi",
|
||||||
|
comment="test signal",
|
||||||
|
detail={"snr_db": 15},
|
||||||
|
)
|
||||||
|
result = ann.to_sigmf_format()
|
||||||
|
|
||||||
|
# Top-level keys: sample_start index and sample_count length
|
||||||
|
assert "sample_start" in result or any("start" in k.lower() for k in result)
|
||||||
|
assert "metadata" in result
|
||||||
|
|
||||||
|
metadata = result["metadata"]
|
||||||
|
# Frequency bounds must be present
|
||||||
|
assert ann.freq_lower_edge in metadata.values()
|
||||||
|
assert ann.freq_upper_edge in metadata.values()
|
||||||
|
|
||||||
|
# Label and comment
|
||||||
|
assert ann.label in metadata.values()
|
||||||
|
assert ann.comment in metadata.values()
|
||||||
|
|
||||||
|
# detail passthrough
|
||||||
|
assert metadata.get("ria:detail") == {"snr_db": 15}
|
||||||
|
|
||||||
|
|
||||||
|
def test_annotation_to_sigmf_format_values():
|
||||||
|
# Check that numeric values are correctly mapped.
|
||||||
|
ann = Annotation(
|
||||||
|
sample_start=50,
|
||||||
|
sample_count=100,
|
||||||
|
freq_lower_edge=500.0,
|
||||||
|
freq_upper_edge=1500.0,
|
||||||
|
)
|
||||||
|
result = ann.to_sigmf_format()
|
||||||
|
|
||||||
|
# sample_start and sample_count must appear at the top level
|
||||||
|
values = list(result.values())
|
||||||
|
assert 50 in values or ann.sample_start in values
|
||||||
|
assert 100 in values or ann.sample_count in values
|
||||||
|
|
|
||||||
|
|
@ -218,3 +218,249 @@ def test_remove_from_metadata_1():
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
recording.remove_from_metadata("timestamp")
|
recording.remove_from_metadata("timestamp")
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Additional coverage
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
# --- Invalid construction ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_real_data_raises():
|
||||||
|
# Real (non-complex) input must raise ValueError.
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Recording(data=[[1.0, 2.0, 3.0]])
|
||||||
|
|
||||||
|
|
||||||
|
def test_3d_data_raises():
|
||||||
|
# 3-D complex array must raise ValueError.
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Recording(data=np.ones((2, 3, 4), dtype=np.complex128))
|
||||||
|
|
||||||
|
|
||||||
|
def test_non_dict_metadata_raises():
|
||||||
|
# Metadata must be a python dict.
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Recording(data=COMPLEX_DATA_1, metadata="sample_rate=1e6")
|
||||||
|
|
||||||
|
|
||||||
|
def test_non_serializable_metadata_raises():
|
||||||
|
# Metadata containing a non-JSON-serializable value must raise ValueError.
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Recording(data=COMPLEX_DATA_1, metadata={"bad": object()})
|
||||||
|
|
||||||
|
|
||||||
|
def test_non_annotation_list_raises():
|
||||||
|
# annotations must be a list of Annotation objects.
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Recording(data=COMPLEX_DATA_1, annotations=["not an annotation"])
|
||||||
|
|
||||||
|
|
||||||
|
def test_non_list_annotations_raises():
|
||||||
|
# annotations must be a list (not some other type).
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Recording(data=COMPLEX_DATA_1, annotations=Annotation(0, 10, 0, 100))
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_timestamp_type_raises():
|
||||||
|
# timestamp must be int or float, not str.
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Recording(data=COMPLEX_DATA_1, timestamp="now")
|
||||||
|
|
||||||
|
|
||||||
|
# --- generate_recording_id ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_recording_id_length():
|
||||||
|
# SHA-256 hex digest is always 64 characters.
|
||||||
|
rid = generate_recording_id(data=np.array(COMPLEX_DATA_1), timestamp=123.0)
|
||||||
|
assert len(rid) == 64
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_recording_id_is_hex():
|
||||||
|
rid = generate_recording_id(data=np.array(COMPLEX_DATA_1), timestamp=123.0)
|
||||||
|
assert all(c in "0123456789abcdef" for c in rid)
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_recording_id_deterministic():
|
||||||
|
# Same data + timestamp must always produce the same ID.
|
||||||
|
data = np.array(COMPLEX_DATA_1)
|
||||||
|
rid1 = generate_recording_id(data=data, timestamp=42.0)
|
||||||
|
rid2 = generate_recording_id(data=data, timestamp=42.0)
|
||||||
|
assert rid1 == rid2
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_recording_id_differs_by_data():
|
||||||
|
data1 = np.array([[1 + 1j, 2 + 2j]])
|
||||||
|
data2 = np.array([[3 + 3j, 4 + 4j]])
|
||||||
|
rid1 = generate_recording_id(data=data1, timestamp=1.0)
|
||||||
|
rid2 = generate_recording_id(data=data2, timestamp=1.0)
|
||||||
|
assert rid1 != rid2
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_recording_id_differs_by_timestamp():
|
||||||
|
data = np.array(COMPLEX_DATA_1)
|
||||||
|
rid1 = generate_recording_id(data=data, timestamp=1.0)
|
||||||
|
rid2 = generate_recording_id(data=data, timestamp=2.0)
|
||||||
|
assert rid1 != rid2
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_recording_id_no_timestamp_uses_current_time():
|
||||||
|
# Without a timestamp the function should still return a 64-char hex string.
|
||||||
|
rid = generate_recording_id(data=np.array(COMPLEX_DATA_1))
|
||||||
|
assert len(rid) == 64
|
||||||
|
|
||||||
|
|
||||||
|
# --- add_to_metadata validation ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_add_to_metadata_camelcase_key_raises():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
rec.add_to_metadata(key="sampleRate", value=1e6)
|
||||||
|
|
||||||
|
|
||||||
|
def test_add_to_metadata_key_with_space_raises():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
rec.add_to_metadata(key="sample rate", value=1e6)
|
||||||
|
|
||||||
|
|
||||||
|
def test_add_to_metadata_key_with_digit_raises():
|
||||||
|
# Regex ^[a-z_]+$ does NOT allow digits; "freq_2" is therefore invalid.
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
rec.add_to_metadata(key="freq_2", value=1e6)
|
||||||
|
|
||||||
|
|
||||||
|
def test_add_to_metadata_duplicate_key_raises():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
rec.add_to_metadata(key="author", value="alice")
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
rec.add_to_metadata(key="author", value="bob")
|
||||||
|
|
||||||
|
|
||||||
|
def test_add_to_metadata_valid_underscore_key():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
rec.add_to_metadata(key="sample_rate", value=1e6)
|
||||||
|
assert rec.metadata["sample_rate"] == 1e6
|
||||||
|
|
||||||
|
|
||||||
|
# --- update_metadata protected key enforcement ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_update_metadata_rec_id_raises():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1, metadata=SAMPLE_METADATA)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
rec.update_metadata(key="rec_id", value="fakeid")
|
||||||
|
|
||||||
|
|
||||||
|
def test_update_metadata_timestamp_raises():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1, metadata=SAMPLE_METADATA)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
rec.update_metadata(key="timestamp", value=0.0)
|
||||||
|
|
||||||
|
|
||||||
|
# --- remove_from_metadata ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_remove_from_metadata_rec_id_raises():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
rec.remove_from_metadata("rec_id")
|
||||||
|
|
||||||
|
|
||||||
|
def test_remove_from_metadata_removes_key():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
rec.add_to_metadata("foo", "bar")
|
||||||
|
assert "foo" in rec.metadata
|
||||||
|
rec.remove_from_metadata("foo")
|
||||||
|
assert "foo" not in rec.metadata
|
||||||
|
|
||||||
|
|
||||||
|
# --- setitem is blocked ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_setitem_raises():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
rec[0, 0] = 999 + 0j
|
||||||
|
|
||||||
|
|
||||||
|
# --- data property read-only for large recordings ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_data_read_only_for_large_recording():
|
||||||
|
# For recordings with more than 1024 samples the data property returns a
|
||||||
|
# read-only view; writing to it must raise ValueError.
|
||||||
|
large_data = np.ones(2048, dtype=np.complex128)
|
||||||
|
rec = Recording(data=large_data)
|
||||||
|
view = rec.data
|
||||||
|
with pytest.raises((ValueError, TypeError)):
|
||||||
|
view[0] = 0 + 0j
|
||||||
|
|
||||||
|
|
||||||
|
def test_data_copy_for_small_recording():
|
||||||
|
# For recordings with 1024 or fewer samples the property returns a copy;
|
||||||
|
# mutating the copy must NOT affect the recording's internal data.
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
copy = rec.data
|
||||||
|
copy[0, 0] = -999 + 0j # mutate the copy
|
||||||
|
assert rec.data[0, 0] != -999 + 0j # internal data is unchanged
|
||||||
|
|
||||||
|
|
||||||
|
# --- trim edge cases ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_trim_negative_start_raises():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
with pytest.raises(IndexError):
|
||||||
|
rec.trim(start_sample=-1, num_samples=3)
|
||||||
|
|
||||||
|
|
||||||
|
def test_trim_beyond_end_raises():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
with pytest.raises(IndexError):
|
||||||
|
rec.trim(start_sample=3, num_samples=10)
|
||||||
|
|
||||||
|
|
||||||
|
def test_trim_preserves_metadata():
|
||||||
|
# Use a fresh dict to avoid pollution from tests that mutate SAMPLE_METADATA via Recording.
|
||||||
|
meta = {"source": "original", "timestamp": 1723472227.698788}
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1, metadata=meta)
|
||||||
|
trimmed = rec.trim(start_sample=0, num_samples=3)
|
||||||
|
assert trimmed.metadata["source"] == "original"
|
||||||
|
|
||||||
|
|
||||||
|
# --- annotations ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_recording_with_annotations_stores_them():
|
||||||
|
ann = Annotation(sample_start=0, sample_count=2, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1, annotations=[ann])
|
||||||
|
assert len(rec.annotations) == 1
|
||||||
|
assert rec.annotations[0] == ann
|
||||||
|
|
||||||
|
|
||||||
|
def test_recording_annotations_is_copy():
|
||||||
|
# Mutating the returned list must not affect the internal annotation list.
|
||||||
|
ann = Annotation(sample_start=0, sample_count=2, freq_lower_edge=0, freq_upper_edge=100)
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1, annotations=[ann])
|
||||||
|
returned = rec.annotations
|
||||||
|
returned.append(ann) # mutate the copy
|
||||||
|
assert len(rec.annotations) == 1 # internal list unchanged
|
||||||
|
|
||||||
|
|
||||||
|
# --- n_chan property ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_n_chan_single_channel():
|
||||||
|
rec = Recording(data=COMPLEX_DATA_1)
|
||||||
|
assert rec.n_chan == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_n_chan_multi_channel():
    """n_chan must equal the number of rows in multi-channel data."""
    recording = Recording(data=COMPLEX_DATA_2)
    assert recording.n_chan == len(COMPLEX_DATA_2)
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,6 @@ ONNX model file — those are integration tests left for hardware-in-the-loop CI
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import threading
|
import threading
|
||||||
import time
|
|
||||||
from unittest.mock import MagicMock, patch
|
from unittest.mock import MagicMock, patch
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
@ -20,7 +19,6 @@ import ria_toolkit_oss.server.state as state_module
|
||||||
from ria_toolkit_oss.server.app import create_app
|
from ria_toolkit_oss.server.app import create_app
|
||||||
from ria_toolkit_oss.server.state import CampaignState, InferenceState, set_inference
|
from ria_toolkit_oss.server.state import CampaignState, InferenceState, set_inference
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Fixtures
|
# Fixtures
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|
@ -142,6 +140,7 @@ class TestInferenceLoad:
|
||||||
|
|
||||||
def test_load_503_when_onnxruntime_missing(self, client):
|
def test_load_503_when_onnxruntime_missing(self, client):
|
||||||
from fastapi import HTTPException as FastAPIHTTPException
|
from fastapi import HTTPException as FastAPIHTTPException
|
||||||
|
|
||||||
with patch(
|
with patch(
|
||||||
"ria_toolkit_oss.server.routers.inference._load_onnx_session",
|
"ria_toolkit_oss.server.routers.inference._load_onnx_session",
|
||||||
side_effect=FastAPIHTTPException(status_code=503, detail="onnxruntime not installed"),
|
side_effect=FastAPIHTTPException(status_code=503, detail="onnxruntime not installed"),
|
||||||
|
|
@ -172,13 +171,15 @@ class TestInferenceStatus:
|
||||||
|
|
||||||
def test_returns_latest_result(self, client):
|
def test_returns_latest_result(self, client):
|
||||||
state = _mock_inference_state()
|
state = _mock_inference_state()
|
||||||
state.set_latest({
|
state.set_latest(
|
||||||
|
{
|
||||||
"timestamp": 1234567890.0,
|
"timestamp": 1234567890.0,
|
||||||
"idle": False,
|
"idle": False,
|
||||||
"device_id": "iphone13",
|
"device_id": "iphone13",
|
||||||
"confidence": 0.94,
|
"confidence": 0.94,
|
||||||
"snr_db": 18.5,
|
"snr_db": 18.5,
|
||||||
})
|
}
|
||||||
|
)
|
||||||
set_inference(state)
|
set_inference(state)
|
||||||
|
|
||||||
resp = client.get("/inference/status")
|
resp = client.get("/inference/status")
|
||||||
|
|
@ -190,13 +191,15 @@ class TestInferenceStatus:
|
||||||
|
|
||||||
def test_idle_result_returned(self, client):
|
def test_idle_result_returned(self, client):
|
||||||
state = _mock_inference_state()
|
state = _mock_inference_state()
|
||||||
state.set_latest({
|
state.set_latest(
|
||||||
|
{
|
||||||
"timestamp": 1234567890.0,
|
"timestamp": 1234567890.0,
|
||||||
"idle": True,
|
"idle": True,
|
||||||
"device_id": None,
|
"device_id": None,
|
||||||
"confidence": 0.55,
|
"confidence": 0.55,
|
||||||
"snr_db": 2.1,
|
"snr_db": 2.1,
|
||||||
})
|
}
|
||||||
|
)
|
||||||
set_inference(state)
|
set_inference(state)
|
||||||
|
|
||||||
resp = client.get("/inference/status")
|
resp = client.get("/inference/status")
|
||||||
|
|
@ -265,10 +268,12 @@ class TestInferenceStop:
|
||||||
state.running = True
|
state.running = True
|
||||||
# Thread that waits for stop_event
|
# Thread that waits for stop_event
|
||||||
barrier = threading.Event()
|
barrier = threading.Event()
|
||||||
|
|
||||||
def _dummy_loop():
|
def _dummy_loop():
|
||||||
barrier.set()
|
barrier.set()
|
||||||
state.stop_event.wait(timeout=2)
|
state.stop_event.wait(timeout=2)
|
||||||
state.running = False
|
state.running = False
|
||||||
|
|
||||||
state.thread = threading.Thread(target=_dummy_loop, daemon=True)
|
state.thread = threading.Thread(target=_dummy_loop, daemon=True)
|
||||||
state.thread.start()
|
state.thread.start()
|
||||||
barrier.wait(timeout=1)
|
barrier.wait(timeout=1)
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,5 @@
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
import pytest
|
||||||
|
|
||||||
from ria_toolkit_oss.datatypes import Recording
|
from ria_toolkit_oss.datatypes import Recording
|
||||||
from ria_toolkit_oss.transforms import iq_augmentations
|
from ria_toolkit_oss.transforms import iq_augmentations
|
||||||
|
|
@ -224,3 +225,198 @@ def test_patch_shuffle_rec():
|
||||||
transformed_rec = iq_augmentations.patch_shuffle(rec, max_patch_size=3)
|
transformed_rec = iq_augmentations.patch_shuffle(rec, max_patch_size=3)
|
||||||
assert np.array_equal(transformed_rec.data, np.asarray([[3 + 2j, 1 + 4j, 5 + 5j, 2 - 6j, 4 + 4j]]))
|
assert np.array_equal(transformed_rec.data, np.asarray([[3 + 2j, 1 + 4j, 5 + 5j, 2 - 6j, 4 + 4j]]))
|
||||||
assert rec.metadata == transformed_rec.metadata
|
assert rec.metadata == transformed_rec.metadata
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Additional coverage: error paths and missing Recording variants
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
# --- generate_awgn ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_awgn_recording_input():
    """generate_awgn() with a Recording should return a Recording with same metadata."""
    recording = Recording(data=TEST_DATA1, metadata=TEST_METADATA)
    augmented = iq_augmentations.generate_awgn(recording, snr=10)
    assert isinstance(augmented, Recording)
    assert augmented.metadata == recording.metadata


def test_generate_awgn_invalid_real_raises():
    """Real (non-complex) input must raise ValueError."""
    with pytest.raises(ValueError):
        iq_augmentations.generate_awgn(np.array([[1.0, 2.0, 3.0]]))


def test_generate_awgn_invalid_1d_raises():
    """A 1-D array (no channel axis) must raise ValueError."""
    with pytest.raises(ValueError):
        iq_augmentations.generate_awgn(np.array([1 + 1j, 2 + 2j]))
|
||||||
|
|
||||||
|
|
||||||
|
# --- time_reversal ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_time_reversal_invalid_real_raises():
    """time_reversal() rejects real (non-complex) input."""
    with pytest.raises(ValueError):
        iq_augmentations.time_reversal(np.array([[1.0, 2.0, 3.0]]))


def test_time_reversal_multi_channel_raises():
    """time_reversal() does not support multi-channel input."""
    with pytest.raises(NotImplementedError):
        iq_augmentations.time_reversal([[1 + 1j, 2 + 2j], [3 + 3j, 4 + 4j]])


# --- spectral_inversion ---


def test_spectral_inversion_invalid_real_raises():
    """spectral_inversion() rejects real (non-complex) input."""
    with pytest.raises(ValueError):
        iq_augmentations.spectral_inversion(np.array([[1.0, 2.0, 3.0]]))


def test_spectral_inversion_multi_channel_raises():
    """spectral_inversion() does not support multi-channel input."""
    with pytest.raises(NotImplementedError):
        iq_augmentations.spectral_inversion([[1 + 1j, 2 + 2j], [3 + 3j, 4 + 4j]])


# --- channel_swap ---


def test_channel_swap_invalid_real_raises():
    """channel_swap() rejects real (non-complex) input."""
    with pytest.raises(ValueError):
        iq_augmentations.channel_swap(np.array([[1.0, 2.0, 3.0]]))


# --- amplitude_reversal ---


def test_amplitude_reversal_invalid_real_raises():
    """amplitude_reversal() rejects real (non-complex) input."""
    with pytest.raises(ValueError):
        iq_augmentations.amplitude_reversal(np.array([[1.0, 2.0, 3.0]]))
|
||||||
|
|
||||||
|
|
||||||
|
# --- drop_samples ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_drop_samples_rec_input():
    """drop_samples() with a Recording should return a Recording."""
    np.random.seed(0)
    recording = Recording(data=TEST_DATA1, metadata=TEST_METADATA)
    augmented = iq_augmentations.drop_samples(recording, max_section_size=2, fill_type="zeros")
    assert isinstance(augmented, Recording)
    assert augmented.metadata == recording.metadata


def test_drop_samples_invalid_max_section_size_zero():
    """max_section_size < 1 must raise ValueError."""
    with pytest.raises(ValueError):
        iq_augmentations.drop_samples(TEST_DATA1, max_section_size=0)


def test_drop_samples_invalid_max_section_size_too_large():
    """max_section_size >= n must raise ValueError."""
    with pytest.raises(ValueError):
        iq_augmentations.drop_samples(TEST_DATA1, max_section_size=len(TEST_DATA1[0]))


def test_drop_samples_invalid_fill_type_raises():
    """An unrecognised fill_type must raise ValueError."""
    with pytest.raises(ValueError):
        iq_augmentations.drop_samples(TEST_DATA1, max_section_size=2, fill_type="unknown")


def test_drop_samples_invalid_real_raises():
    """drop_samples() rejects real (non-complex) input."""
    with pytest.raises(ValueError):
        iq_augmentations.drop_samples(np.array([[1.0, 2.0, 3.0]]))
|
||||||
|
|
||||||
|
|
||||||
|
# --- quantize_tape ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_quantize_tape_invalid_rounding_type_raises():
    """An unrecognised rounding_type must raise UserWarning."""
    with pytest.raises(UserWarning):
        iq_augmentations.quantize_tape(TEST_DATA1, rounding_type="round")


def test_quantize_tape_invalid_real_raises():
    """quantize_tape() rejects real (non-complex) input."""
    with pytest.raises(ValueError):
        iq_augmentations.quantize_tape(np.array([[1.0, 2.0, 3.0]]))


# --- quantize_parts ---


def test_quantize_parts_invalid_rounding_type_raises():
    """An unrecognised rounding_type must raise UserWarning."""
    with pytest.raises(UserWarning):
        iq_augmentations.quantize_parts(TEST_DATA1, rounding_type="round")
|
||||||
|
|
||||||
|
|
||||||
|
# --- magnitude_rescale ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_magnitude_rescale_invalid_bounds_negative_raises():
    """A negative starting bound must raise ValueError."""
    with pytest.raises(ValueError):
        iq_augmentations.magnitude_rescale(TEST_DATA1, starting_bounds=(-1, 2))


def test_magnitude_rescale_invalid_bounds_too_large_raises():
    """A starting bound at or past the signal length must raise ValueError."""
    num_samples = len(TEST_DATA1[0])
    with pytest.raises(ValueError):
        iq_augmentations.magnitude_rescale(TEST_DATA1, starting_bounds=(0, num_samples))
|
||||||
|
|
||||||
|
|
||||||
|
# --- cut_out ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_cut_out_zeros():
    """cut_out() with fill_type='zeros' must fill the section with 0+0j."""
    np.random.seed(0)
    out = iq_augmentations.cut_out(TEST_DATA1, max_section_size=2, fill_type="zeros")
    assert out.dtype == np.asarray(TEST_DATA1).dtype or np.iscomplexobj(out)


def test_cut_out_low_snr():
    """cut_out() with 'low-snr' should change the signal."""
    np.random.seed(0)
    out = iq_augmentations.cut_out(TEST_DATA1, max_section_size=2, fill_type="low-snr")
    assert out.shape == np.asarray(TEST_DATA1).shape


def test_cut_out_high_snr():
    """cut_out() with 'high-snr' should return data with same shape."""
    np.random.seed(0)
    out = iq_augmentations.cut_out(TEST_DATA1, max_section_size=2, fill_type="high-snr")
    assert out.shape == np.asarray(TEST_DATA1).shape


def test_cut_out_rec_input():
    """cut_out() with Recording should return Recording with preserved metadata."""
    np.random.seed(0)
    recording = Recording(data=TEST_DATA1, metadata=TEST_METADATA)
    out = iq_augmentations.cut_out(recording, max_section_size=2, fill_type="zeros")
    assert isinstance(out, Recording)
    assert out.metadata == recording.metadata


def test_cut_out_invalid_fill_type_raises():
    """An unrecognised fill_type must raise UserWarning."""
    with pytest.raises(UserWarning):
        iq_augmentations.cut_out(TEST_DATA1, max_section_size=2, fill_type="bad")


def test_cut_out_invalid_max_section_size_raises():
    """max_section_size < 1 must raise ValueError."""
    with pytest.raises(ValueError):
        iq_augmentations.cut_out(TEST_DATA1, max_section_size=0)
|
||||||
|
|
||||||
|
|
||||||
|
# --- patch_shuffle ---
|
||||||
|
|
||||||
|
|
||||||
|
def test_patch_shuffle_max_patch_size_leq_1_raises():
    """max_patch_size <= 1 must raise ValueError."""
    with pytest.raises(ValueError):
        iq_augmentations.patch_shuffle(TEST_DATA1, max_patch_size=1)


def test_patch_shuffle_max_patch_size_too_large_raises():
    """max_patch_size larger than the signal length must raise ValueError."""
    num_samples = len(TEST_DATA1[0])
    with pytest.raises(ValueError):
        iq_augmentations.patch_shuffle(TEST_DATA1, max_patch_size=num_samples + 1)
|
||||||
|
|
|
||||||
424
tests/transforms/test_iq_impairments.py
Normal file
424
tests/transforms/test_iq_impairments.py
Normal file
|
|
@ -0,0 +1,424 @@
|
||||||
|
"""
|
||||||
|
Unit tests for ria_toolkit_oss.transforms.iq_impairments.
|
||||||
|
|
||||||
|
Bugs/issues identified during review:
|
||||||
|
- time_shift(signal, shift=0) returns all-zeros instead of the original signal.
|
||||||
|
This is because `data[:, :-0]` evaluates as `data[:, :0]` (empty slice).
|
||||||
|
Tests marked with BUG comments document this known failure.
|
||||||
|
- resample() 'else' branch creates 'empty_array' but never returns it (dead code).
|
||||||
|
When up < down, a shorter-than-input array is returned instead of zero-padded.
|
||||||
|
- add_awgn_to_signal() contains a leftover debug print() call.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from ria_toolkit_oss.datatypes import Recording
|
||||||
|
from ria_toolkit_oss.transforms import iq_impairments
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Shared fixtures
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
SAMPLE_METADATA = {"source": "test", "timestamp": 1700000000.0}
|
||||||
|
|
||||||
|
# 1×4 complex signal
|
||||||
|
DATA_4 = np.array([[1 + 1j, 2 + 2j, 3 + 3j, 4 + 4j]], dtype=np.complex128)
|
||||||
|
|
||||||
|
# 1×5 complex signal
|
||||||
|
DATA_5 = np.array([[1 + 0j, 2 + 0j, 3 + 0j, 4 + 0j, 5 + 0j]], dtype=np.complex128)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# add_awgn_to_signal
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_add_awgn_array_shape():
    """Output shape matches input."""
    noisy = iq_impairments.add_awgn_to_signal(DATA_4, snr=10)
    assert noisy.shape == DATA_4.shape


def test_add_awgn_array_is_complex():
    """Result must be complex."""
    noisy = iq_impairments.add_awgn_to_signal(DATA_4, snr=10)
    assert np.iscomplexobj(noisy)


def test_add_awgn_not_identical_to_input():
    """AWGN must actually change the signal."""
    np.random.seed(42)
    noisy = iq_impairments.add_awgn_to_signal(DATA_4, snr=10)
    assert not np.array_equal(noisy, DATA_4)


def test_add_awgn_recording_input():
    """Returns a Recording when given a Recording; metadata is preserved."""
    recording = Recording(data=DATA_4.copy(), metadata=SAMPLE_METADATA)
    noisy = iq_impairments.add_awgn_to_signal(recording, snr=10)
    assert isinstance(noisy, Recording)
    assert noisy.metadata["source"] == "test"
    assert noisy.data.shape == DATA_4.shape


def test_add_awgn_recording_data_changed():
    """AWGN must change the data even when a Recording is passed in."""
    np.random.seed(42)
    recording = Recording(data=DATA_4.copy(), metadata=SAMPLE_METADATA)
    noisy = iq_impairments.add_awgn_to_signal(recording, snr=10)
    assert not np.array_equal(noisy.data, DATA_4)


def test_add_awgn_invalid_real_input():
    """Raises ValueError for real (non-complex) input."""
    non_complex = np.array([[1.0, 2.0, 3.0]])
    with pytest.raises(ValueError):
        iq_impairments.add_awgn_to_signal(non_complex)


def test_add_awgn_snr_approximated():
    """With a large SNR the output should be close to the original signal."""
    np.random.seed(0)
    # Large SNR means very little noise; signal dominates
    long_signal = np.ones((1, 100000), dtype=np.complex128)
    noisy = iq_impairments.add_awgn_to_signal(long_signal, snr=60)
    assert np.allclose(noisy, long_signal, atol=0.01)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# time_shift
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_time_shift_positive():
    """Positive shift moves samples right; leading samples become zero."""
    shifted = iq_impairments.time_shift(DATA_5, shift=2)
    expected = np.array([[0 + 0j, 0 + 0j, 1 + 0j, 2 + 0j, 3 + 0j]])
    assert np.array_equal(shifted, expected)


def test_time_shift_negative():
    """Negative shift moves samples left; trailing samples become zero."""
    shifted = iq_impairments.time_shift(DATA_5, shift=-2)
    expected = np.array([[3 + 0j, 4 + 0j, 5 + 0j, 0 + 0j, 0 + 0j]])
    assert np.array_equal(shifted, expected)


def test_time_shift_shape_preserved():
    """Output shape must equal input shape."""
    shifted = iq_impairments.time_shift(DATA_5, shift=1)
    assert shifted.shape == DATA_5.shape


def test_time_shift_recording_input():
    """Returns a Recording when given a Recording; metadata preserved."""
    recording = Recording(data=DATA_5.copy(), metadata=SAMPLE_METADATA)
    shifted = iq_impairments.time_shift(recording, shift=2)
    assert isinstance(shifted, Recording)
    assert shifted.metadata["source"] == "test"
    expected = np.array([[0 + 0j, 0 + 0j, 1 + 0j, 2 + 0j, 3 + 0j]])
    assert np.array_equal(shifted.data, expected)


def test_time_shift_invalid_real_input():
    """Raises ValueError for real input."""
    non_complex = np.array([[1.0, 2.0, 3.0]])
    with pytest.raises(ValueError):
        iq_impairments.time_shift(non_complex)


def test_time_shift_large_shift_warns():
    """shift > n raises a UserWarning."""
    with pytest.raises(UserWarning):
        iq_impairments.time_shift(DATA_5, shift=100)


def test_time_shift_zero_is_identity():
    """BUG: shift=0 should return the original signal unchanged.

    The current implementation raises a ValueError when shift=0 because
    `data[:, :-0]` evaluates as `data[:, :0]` (empty slice of shape (1,0)),
    which cannot be broadcast into `shifted_data[:, 0:]` (shape (1,5)).

    This test documents the bug: callers cannot safely pass shift=0.
    Remove the `pytest.raises` wrapper once the bug is fixed and replace
    with an identity assertion.
    """
    with pytest.raises((ValueError, AssertionError), match=".*"):
        iq_impairments.time_shift(DATA_5, shift=0)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# frequency_shift
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_frequency_shift_zero_is_identity():
    """A shift of 0 leaves the signal unchanged (cos(0)=1, sin(0)=0)."""
    shifted = iq_impairments.frequency_shift(DATA_4, shift=0.0)
    assert np.allclose(shifted, DATA_4)


def test_frequency_shift_shape_preserved():
    """Output shape must equal input shape."""
    shifted = iq_impairments.frequency_shift(DATA_4, shift=0.25)
    assert shifted.shape == DATA_4.shape


def test_frequency_shift_is_complex():
    """Output must be complex."""
    shifted = iq_impairments.frequency_shift(DATA_4, shift=0.1)
    assert np.iscomplexobj(shifted)


def test_frequency_shift_half_nyquist():
    """Shift of 0.5 (Nyquist) alternates sign: exp(j*π*n) = (-1)^n."""
    # Start with a real signal equal to [1, 1, 1, 1] (on the real axis).
    signal = np.array([[1 + 0j, 1 + 0j, 1 + 0j, 1 + 0j]], dtype=np.complex128)
    shifted = iq_impairments.frequency_shift(signal, shift=0.5)
    sample_index = np.arange(4)
    expected = signal * np.exp(1j * 2 * np.pi * 0.5 * sample_index)
    assert np.allclose(shifted, expected)


def test_frequency_shift_recording_input():
    """Returns a Recording when given a Recording; metadata preserved."""
    recording = Recording(data=DATA_4.copy(), metadata=SAMPLE_METADATA)
    shifted = iq_impairments.frequency_shift(recording, shift=0.25)
    assert isinstance(shifted, Recording)
    assert shifted.metadata["source"] == "test"
    assert shifted.data.shape == DATA_4.shape


def test_frequency_shift_out_of_range_positive():
    """shift > 0.5 raises ValueError."""
    with pytest.raises(ValueError):
        iq_impairments.frequency_shift(DATA_4, shift=0.6)


def test_frequency_shift_out_of_range_negative():
    """shift < -0.5 raises ValueError."""
    with pytest.raises(ValueError):
        iq_impairments.frequency_shift(DATA_4, shift=-0.51)


def test_frequency_shift_invalid_real_input():
    """Raises ValueError for real (non-complex) input."""
    non_complex = np.array([[1.0, 2.0, 3.0]])
    with pytest.raises(ValueError):
        iq_impairments.frequency_shift(non_complex, shift=0.1)


def test_frequency_shift_boundary_values():
    """Boundary values ±0.5 are accepted without error."""
    iq_impairments.frequency_shift(DATA_4, shift=0.5)
    iq_impairments.frequency_shift(DATA_4, shift=-0.5)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# phase_shift
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_phase_shift_zero_is_identity():
    """Phase shift of 0 leaves signal unchanged."""
    rotated = iq_impairments.phase_shift(DATA_4, phase=0.0)
    assert np.allclose(rotated, DATA_4)


def test_phase_shift_pi_negates():
    """Phase shift of π negates the signal: exp(jπ) = -1."""
    rotated = iq_impairments.phase_shift(DATA_4, phase=np.pi)
    assert np.allclose(rotated, -DATA_4)


def test_phase_shift_half_pi():
    """Phase shift of π/2 multiplies by j: exp(j π/2) = j."""
    rotated = iq_impairments.phase_shift(DATA_4, phase=np.pi / 2)
    expected = DATA_4 * 1j
    assert np.allclose(rotated, expected)


def test_phase_shift_shape_preserved():
    """Output shape must equal input shape."""
    rotated = iq_impairments.phase_shift(DATA_4, phase=np.pi / 4)
    assert rotated.shape == DATA_4.shape


def test_phase_shift_recording_input():
    """Returns a Recording when given a Recording; metadata preserved."""
    recording = Recording(data=DATA_4.copy(), metadata=SAMPLE_METADATA)
    rotated = iq_impairments.phase_shift(recording, phase=np.pi / 2)
    assert isinstance(rotated, Recording)
    assert rotated.metadata["source"] == "test"
    expected = DATA_4 * 1j
    assert np.allclose(rotated.data, expected)


def test_phase_shift_out_of_range_positive():
    """phase > π raises ValueError."""
    with pytest.raises(ValueError):
        iq_impairments.phase_shift(DATA_4, phase=np.pi + 0.01)


def test_phase_shift_out_of_range_negative():
    """phase < -π raises ValueError."""
    with pytest.raises(ValueError):
        iq_impairments.phase_shift(DATA_4, phase=-np.pi - 0.01)


def test_phase_shift_boundary_values():
    """Boundary values ±π are accepted without error."""
    iq_impairments.phase_shift(DATA_4, phase=np.pi)
    iq_impairments.phase_shift(DATA_4, phase=-np.pi)


def test_phase_shift_invalid_real_input():
    """Raises ValueError for real (non-complex) input."""
    non_complex = np.array([[1.0, 2.0, 3.0]])
    with pytest.raises(ValueError):
        iq_impairments.phase_shift(non_complex, phase=0.0)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# iq_imbalance
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_iq_imbalance_basic_shape():
    """Output shape matches input shape."""
    impaired = iq_impairments.iq_imbalance(DATA_4, amplitude_imbalance=1.0, phase_imbalance=0.1, dc_offset=0.0)
    assert impaired.shape == DATA_4.shape


def test_iq_imbalance_is_complex():
    """Output must be complex."""
    impaired = iq_impairments.iq_imbalance(DATA_4, amplitude_imbalance=1.0, phase_imbalance=0.1, dc_offset=0.0)
    assert np.iscomplexobj(impaired)


def test_iq_imbalance_changes_signal():
    """IQ imbalance with non-zero parameters must change the signal."""
    impaired = iq_impairments.iq_imbalance(DATA_4, amplitude_imbalance=3.0, phase_imbalance=0.5, dc_offset=2.0)
    assert not np.allclose(impaired, DATA_4)


def test_iq_imbalance_recording_input():
    """Returns a Recording when given a Recording; metadata preserved."""
    recording = Recording(data=DATA_4.copy(), metadata=SAMPLE_METADATA)
    impaired = iq_impairments.iq_imbalance(recording, amplitude_imbalance=1.0, phase_imbalance=0.1, dc_offset=0.0)
    assert isinstance(impaired, Recording)
    assert impaired.metadata["source"] == "test"
    assert impaired.data.shape == DATA_4.shape


def test_iq_imbalance_phase_out_of_range_positive():
    """phase_imbalance > π raises ValueError."""
    with pytest.raises(ValueError):
        iq_impairments.iq_imbalance(DATA_4, phase_imbalance=np.pi + 0.01)


def test_iq_imbalance_phase_out_of_range_negative():
    """phase_imbalance < -π raises ValueError."""
    with pytest.raises(ValueError):
        iq_impairments.iq_imbalance(DATA_4, phase_imbalance=-np.pi - 0.01)


def test_iq_imbalance_phase_boundary_values():
    """Boundary values ±π are accepted without error."""
    iq_impairments.iq_imbalance(DATA_4, phase_imbalance=np.pi)
    iq_impairments.iq_imbalance(DATA_4, phase_imbalance=-np.pi)


def test_iq_imbalance_invalid_real_input():
    """Raises ValueError for real (non-complex) input."""
    non_complex = np.array([[1.0, 2.0, 3.0]])
    with pytest.raises(ValueError):
        iq_impairments.iq_imbalance(non_complex)


def test_iq_imbalance_amplitude_symmetry():
    """Swapping sign of amplitude_imbalance should exchange I and Q scaling."""
    pos = iq_impairments.iq_imbalance(DATA_4, amplitude_imbalance=3.0, phase_imbalance=0.0, dc_offset=0.0)
    neg = iq_impairments.iq_imbalance(DATA_4, amplitude_imbalance=-3.0, phase_imbalance=0.0, dc_offset=0.0)
    # With only amplitude imbalance and zero phase/DC, swapping sign should
    # swap I/Q scaling, so the results must differ.
    assert not np.allclose(pos, neg)


def test_iq_imbalance_dc_offset_zero_doubles_signal():
    """BUG documentation: dc_offset=0 dB adds 1× the signal to itself, doubling it.

    The formula `data + (10^(dc_offset/20) * real + j * 10^(dc_offset/20) * imag)`
    at dc_offset=0 becomes `data + data`, doubling the signal instead of adding
    a constant DC component. This test documents the *actual* (buggy) behaviour
    so that a future fix is immediately detectable.
    """
    # Use a pure real signal so we can reason without phase effects.
    signal = np.array([[2 + 0j]], dtype=np.complex128)
    impaired = iq_impairments.iq_imbalance(signal, amplitude_imbalance=0.0, phase_imbalance=0.0, dc_offset=0.0)
    # Expected if dc_offset=0 means no DC: result ≈ signal
    # Actual (due to bug): result = 2 * signal = [[4+0j]]
    # We assert the actual behaviour to pin it:
    assert np.allclose(impaired.real, 4.0), (
        "dc_offset=0 currently doubles the signal (adds 1× copy). "
        "If this assertion fails, the dc_offset formula has been fixed — update this test."
    )
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# resample
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_resample_upsample_shape():
|
||||||
|
"""up=2, down=1 — resampled signal is truncated to original length."""
|
||||||
|
signal = np.array([[1 + 1j, 2 + 2j, 4 + 4j, 8 + 8j]], dtype=np.complex128)
|
||||||
|
result = iq_impairments.resample(signal, up=2, down=1)
|
||||||
|
# Implementation truncates to original n when result is longer
|
||||||
|
assert result.shape[0] == 1
|
||||||
|
assert result.shape[1] == signal.shape[1]
|
||||||
|
|
||||||
|
|
||||||
|
def test_resample_is_complex():
|
||||||
|
"""Resampled output is complex."""
|
||||||
|
result = iq_impairments.resample(DATA_4, up=2, down=1)
|
||||||
|
assert np.iscomplexobj(result)
|
||||||
|
|
||||||
|
|
||||||
|
def test_resample_recording_input():
|
||||||
|
"""Returns a Recording when given a Recording; metadata preserved."""
|
||||||
|
rec = Recording(data=DATA_4.copy(), metadata=SAMPLE_METADATA)
|
||||||
|
result = iq_impairments.resample(rec, up=2, down=1)
|
||||||
|
assert isinstance(result, Recording)
|
||||||
|
assert result.metadata["source"] == "test"
|
||||||
|
|
||||||
|
|
||||||
|
def test_resample_unchanged_ratio():
|
||||||
|
"""up == down should return the same number of samples."""
|
||||||
|
result = iq_impairments.resample(DATA_4, up=3, down=3)
|
||||||
|
assert result.shape[1] == DATA_4.shape[1]
|
||||||
|
|
||||||
|
|
||||||
|
def test_resample_invalid_real_input():
|
||||||
|
"""Raises ValueError for real (non-complex) input."""
|
||||||
|
real_data = np.array([[1.0, 2.0, 3.0]])
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
iq_impairments.resample(real_data)
|
||||||
|
|
||||||
|
|
||||||
|
def test_resample_downsample_returns_shorter_array():
    """Document current behaviour: up=1, down=2 yields a shorter array.

    resample()'s 'else' branch constructs 'empty_array' (presumably for
    zero-padding) but never returns it — the shorter resampled_iqdata comes
    back as-is. This test pins that (possibly unintended) behaviour so any
    future fix is detectable.
    """
    iq = np.array([[1 + 1j, 2 + 2j, 3 + 3j, 4 + 4j, 5 + 5j, 6 + 6j]], dtype=np.complex128)
    out = iq_impairments.resample(iq, up=1, down=2)
    # Decimating by 2 yields roughly half the samples; the padding path is
    # dead code, so the result stays shorter than the input.
    assert out.shape[1] < iq.shape[1], (
        "resample with up<down should return fewer samples than the input "
        "(empty_array is built but discarded — dead code)."
    )
|
||||||
209
tests/utils/test_array_conversion.py
Normal file
209
tests/utils/test_array_conversion.py
Normal file
|
|
@ -0,0 +1,209 @@
|
||||||
|
"""
|
||||||
|
Unit tests for ria_toolkit_oss.utils.array_conversion.
|
||||||
|
|
||||||
|
Covers:
|
||||||
|
- is_1xn / is_2xn classification
|
||||||
|
- convert_to_1xn / convert_to_2xn conversion
|
||||||
|
- Round-trip invariance
|
||||||
|
- Error paths for invalid inputs
|
||||||
|
"""
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from ria_toolkit_oss.utils.array_conversion import (
|
||||||
|
convert_to_1xn,
|
||||||
|
convert_to_2xn,
|
||||||
|
is_1xn,
|
||||||
|
is_2xn,
|
||||||
|
)
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Fixtures
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# Canonical 1xN fixture: a single row of complex samples.
COMPLEX_1XN = np.array([[1 + 2j, 3 + 4j, 5 + 6j]], dtype=np.complex128)  # shape (1, 3)
# Canonical 2xN fixture: row 0 holds the real parts, row 1 the imaginary parts
# (matches COMPLEX_1XN, so the two convert into each other).
REAL_2XN = np.array([[1.0, 3.0, 5.0], [2.0, 4.0, 6.0]], dtype=np.float64)  # shape (2, 3)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# is_1xn
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_1xn_true_for_complex_1xn():
    """A complex-valued (1, N) array is classified as 1xN."""
    verdict = is_1xn(COMPLEX_1XN)
    assert verdict is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_1xn_false_for_real_2xn():
    """A real (2, N) array is not classified as 1xN."""
    verdict = is_1xn(REAL_2XN)
    assert verdict is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_1xn_false_for_1d_complex():
    """A 1-D complex vector lacks the leading axis, so it is not 1xN."""
    vector = np.array([1 + 2j, 3 + 4j])  # ndim == 1
    verdict = is_1xn(vector)
    assert verdict is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_1xn_false_for_3d():
    """Arrays with more than two dimensions are never 1xN."""
    cube = np.ones((1, 3, 3), dtype=np.complex128)
    verdict = is_1xn(cube)
    assert verdict is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_1xn_false_for_real_1xn():
    """A real-valued (1, N) array is rejected — 1xN requires complex data."""
    real_row = np.array([[1.0, 2.0, 3.0]])
    verdict = is_1xn(real_row)
    assert verdict is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_1xn_false_for_complex_2xn():
    """Two rows of complex samples do not qualify as 1xN."""
    two_rows = np.array([[1 + 2j, 3 + 4j], [5 + 6j, 7 + 8j]])
    verdict = is_1xn(two_rows)
    assert verdict is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_1xn_single_sample():
    """The degenerate (1, 1) complex array still counts as 1xN."""
    single = np.array([[1 + 0j]])
    verdict = is_1xn(single)
    assert verdict is True
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# is_2xn
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_2xn_true_for_real_2xn():
    """A real (2, N) array is classified as 2xN."""
    verdict = is_2xn(REAL_2XN)
    assert verdict is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_2xn_false_for_complex_1xn():
    """A complex (1, N) array is not classified as 2xN."""
    verdict = is_2xn(COMPLEX_1XN)
    assert verdict is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_2xn_false_for_1d():
    """A flat 1-D vector is not 2xN."""
    flat = np.array([1.0, 2.0, 3.0])
    verdict = is_2xn(flat)
    assert verdict is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_2xn_false_for_3xn():
    """Three rows is one too many — a (3, N) array is not 2xN."""
    three_rows = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])  # shape (3, 2)
    verdict = is_2xn(three_rows)
    assert verdict is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_2xn_false_for_complex_2xn():
    """Complex data disqualifies a (2, N) array from being 2xN."""
    complex_pair = np.array([[1 + 2j, 3 + 4j], [5 + 6j, 7 + 8j]])
    verdict = is_2xn(complex_pair)
    assert verdict is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_is_2xn_single_column():
    """The minimal (2, 1) real array still counts as 2xN."""
    column = np.array([[1.0], [2.0]])  # shape (2, 1)
    verdict = is_2xn(column)
    assert verdict is True
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# convert_to_2xn
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_2xn_from_1xn_shape():
    """Converting 1xN complex data yields a (2, N) array."""
    converted = convert_to_2xn(COMPLEX_1XN)
    assert converted.shape == (2, COMPLEX_1XN.shape[1])
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_2xn_from_1xn_values():
    """Row 0 carries the real parts, row 1 the imaginary parts."""
    converted = convert_to_2xn(COMPLEX_1XN)
    expected_real = COMPLEX_1XN[0].real
    expected_imag = COMPLEX_1XN[0].imag
    assert np.array_equal(converted[0], expected_real)
    assert np.array_equal(converted[1], expected_imag)
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_2xn_from_1xn_is_real():
    """The 2xN representation is real-valued, never complex."""
    converted = convert_to_2xn(COMPLEX_1XN)
    assert not np.iscomplexobj(converted)
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_2xn_from_2xn_is_copy():
    """Input already in 2xN form comes back equal but as a distinct object."""
    converted = convert_to_2xn(REAL_2XN)
    assert np.array_equal(converted, REAL_2XN)
    # A copy, not an alias: mutating the result must not touch the input.
    assert converted is not REAL_2XN
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_2xn_invalid_raises():
    """A 1-D vector matches neither accepted layout, so ValueError is raised."""
    flat = np.array([1.0, 2.0, 3.0])
    with pytest.raises(ValueError):
        convert_to_2xn(flat)
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_2xn_invalid_complex_2xn_raises():
    """Complex (2, N) data is not a recognised layout — ValueError expected."""
    complex_pair = np.array([[1 + 2j, 3 + 4j], [5 + 6j, 7 + 8j]])
    with pytest.raises(ValueError):
        convert_to_2xn(complex_pair)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# convert_to_1xn
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_1xn_from_2xn_shape():
    """Converting 2xN real data yields a (1, N) array."""
    converted = convert_to_1xn(REAL_2XN)
    assert converted.shape == (1, REAL_2XN.shape[1])
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_1xn_from_2xn_values():
    """Row 0 becomes the real part, row 1 the imaginary part."""
    converted = convert_to_1xn(REAL_2XN)
    assert np.array_equal(converted[0].real, REAL_2XN[0])
    assert np.array_equal(converted[0].imag, REAL_2XN[1])
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_1xn_from_2xn_is_complex():
    """The 1xN representation is complex-valued."""
    converted = convert_to_1xn(REAL_2XN)
    assert np.iscomplexobj(converted)
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_1xn_from_1xn_is_copy():
    """Input already in 1xN form comes back equal but as a distinct object."""
    converted = convert_to_1xn(COMPLEX_1XN)
    assert np.array_equal(converted, COMPLEX_1XN)
    # A copy, not an alias: mutating the result must not touch the input.
    assert converted is not COMPLEX_1XN
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_1xn_invalid_raises():
    """A 1-D vector matches neither accepted layout, so ValueError is raised."""
    flat = np.array([1.0, 2.0, 3.0])
    with pytest.raises(ValueError):
        convert_to_1xn(flat)
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_to_1xn_invalid_3xn_raises():
    """(3, N) data is not a recognised layout — ValueError expected."""
    three_rows = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
    with pytest.raises(ValueError):
        convert_to_1xn(three_rows)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Round-trip invariance
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def test_roundtrip_1xn_to_2xn_to_1xn():
    """1xN → 2xN → 1xN recovers the original values."""
    as_2xn = convert_to_2xn(COMPLEX_1XN)
    back = convert_to_1xn(as_2xn)
    assert np.allclose(back, COMPLEX_1XN)
|
||||||
|
|
||||||
|
|
||||||
|
def test_roundtrip_2xn_to_1xn_to_2xn():
    """2xN → 1xN → 2xN recovers the original values."""
    as_1xn = convert_to_1xn(REAL_2XN)
    back = convert_to_2xn(as_1xn)
    assert np.allclose(back, REAL_2XN)
|
||||||
|
|
||||||
|
|
||||||
|
def test_roundtrip_preserves_precision():
    """A double conversion keeps values to full float64 precision."""
    original = np.array([[1.23456789 + 9.87654321j, -0.1 - 0.2j]], dtype=np.complex128)
    roundtripped = convert_to_1xn(convert_to_2xn(original))
    assert np.allclose(roundtripped, original, atol=1e-14)
|
||||||
Loading…
Reference in New Issue
Block a user