Adding Sphinx docs set

This commit is contained in:
Michael Luciuk 2025-09-04 12:29:54 -04:00
parent d8a7dc16be
commit 131dc3f933
47 changed files with 1860 additions and 219 deletions

View File

@ -129,7 +129,7 @@ pip install -e .
Once the project is installed, you can import its modules, functions, and classes for use in your Python code. For example, you can use the following import statement to access the `Recording` object:
```python
from utils.data import Recording
from ria_toolkit_oss.datatypes import Recording
```
Additional usage information is provided in the project documentation.
@ -195,7 +195,7 @@ Project documentation is auto-generated from project docstrings using [Sphinx](h
It's recommended to use `sphinx-autobuild`, which eliminates the need to manually rebuild the docs after making changes:
```bash
sphinx-autobuild docs/source docs/build/html
poetry run sphinx-autobuild docs/source docs/build/html
```
When using `sphinx-autobuild`, the docs will automatically be served at http://127.0.0.1:8000.
@ -208,7 +208,7 @@ make html
Once the documentation is built, you can view it by opening `docs/build/html/index.html` in a web browser. Please note that this strategy requires manually rebuilding the documentation to view updates.
For more information on basic Sphinx usage, start [here](https://sphinx-rtd-tutorial.readthedocs.io/en/latest/index.html).
For more information on basic Sphinx usage, start [here](https://sphinx-rtd-tutorial.readthedocs.io/en/latest/index.html).
### tox

20
docs/Makefile Normal file
View File

@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
# Input (reStructuredText sources + conf.py) and output directories.
SOURCEDIR = source
BUILDDIR = build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

35
docs/make.bat Normal file
View File

@ -0,0 +1,35 @@
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

REM Fall back to the sphinx-build found on PATH when SPHINXBUILD is not set.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build

REM Probe that sphinx-build is runnable; errorlevel 9009 means "command not found".
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.https://www.sphinx-doc.org/
	exit /b 1
)

REM No target given: show Sphinx's help.
if "%1" == "" goto help

REM Delegate the requested target to Sphinx's "make mode" (-M).
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd

View File

@ -0,0 +1,402 @@
{% extends "!layout.html" %}
{% block footer %} {{ super() }}
<link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=Open+Sans:wght@300&display=swap">
<style>
* {
color: #f0f0f0;
font-family: 'Open Sans', sans-serif;
font-weight: 300;
}
.wy-nav-content:not(:has(section)) {
max-width: 1200px;
}
.rst-content div[class^=highlight] pre {
font-size: 14px;
}
nav ul.current li, nav ul li.current, nav ul.current li.current, [aria-expanded="true"] {
background-color: #1c1c1c;
color: #f0f0f0;
}
nav ul.current li:hover, nav ul li.current:hover, nav ul.current li.current:hover, [aria-expanded="true"]:hover {
background-color: #4e4a4a;
}
.wy-menu-vertical li.toctree-l2.current>a {
background-color: #1c1c1c;
color: #f0f0f0;
}
.wy-menu-vertical li.toctree-l2.current>a:hover {
background-color: #4e4a4a;
}
.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a {
background-color: #1c1c1c;
color: #f0f0f0;
}
.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a:hover {
background-color: #4e4a4a;
}
.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a {
background-color: #1c1c1c;
color: #f0f0f0;
}
.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a:hover {
background-color: #4e4a4a;
}
.wy-menu-vertical li.current>a {
background-color: #1c1c1c;
color: #f0f0f0;
}
.wy-menu-vertical li.current>a:hover {
background-color: #4e4a4a;
}
.wy-menu-vertical li.toctree-l1.current>a {
border: none;
color: #f0f0f0;
}
.wy-menu-vertical li.current a {
border: none;
color: #f0f0f0;
}
.wy-menu-vertical li.current a:hover {
background-color: #4e4a4a;
}
.wy-menu-vertical a {
color: #f0f0f0;
}
.wy-side-nav-search, .wy-nav-top {
background: #625F63;
}
.wy-nav-side {
background: #1c1c1c;
}
.wy-body-for-nav {
background: #1c1c1c;
}
.wy-nav-content-wrap {
background: #1c1c1c;
}
.wy-nav-content {
background: #222222;
}
.wy-nav-content a {
color: #03A9F4;
}
#subpackages span.pre {
color: #03A9F4;
}
* h1, h2, h3, h4, h5, h6 {
color: #f0f0f0;
font-family: 'Open Sans', sans-serif;
font-weight: 300;
}
/* Inline code literals. Sphinx/docutils emit the class chain
   `code.docutils.literal.notranslate`; the previous selectors said
   `docria_toolkit_oss` (a find/replace of "utils" inside "docutils"),
   so none of these rules ever matched. */
code.docutils.literal.notranslate {
border-radius: 4px;
border: 1px solid rgba(255, 255, 255, .1);
background: #171717;
padding: 3px 4px;
}
/* Highlight inline literals in body text, but not those inside the side
   menu or toctree listings (handled separately below). */
code.docutils.literal.notranslate span.pre:not(.wy-menu code.docutils.literal.notranslate span.pre):not(.toctree-wrapper.compound code.docutils.literal.notranslate span.pre) {
color: #AE81FF;
font-size: 14px;
font-weight: 530;
font-family: SFMono-Regular, Consolas, liberation mono, Menlo, Courier, monospace;
background-color: #171717;
}
.rst-content code.literal {
white-space: nowrap;
}
/* Menu / toctree literals: no box styling, inherit the dark background. */
.wy-menu code.docutils.literal.notranslate, .toctree-wrapper.compound code.docutils.literal.notranslate {
font-size: 11.96px;
text-decoration: none;
border: none;
background-color: inherit;
}
.wy-menu:hover code.docutils.literal.notranslate span.pre {
background-color: inherit;
}
.wy-menu code.docutils.literal.notranslate span.pre {
color: #f0f0f0;
background-color: #1c1c1c;
border: none;
}
.toctree-wrapper.compound code.docutils.literal.notranslate span.pre {
color: #03A9F4;
border: none;
}
/* Cross-reference literals (py:obj / py:class / py:func roles). */
code.xref.py.py-obj.docutils.literal.notranslate span.pre, code.xref.py.py-class.docutils.literal.notranslate span.pre, code.xref.py.py-func.docutils.literal.notranslate span.pre {
background-color: #222222;
border: none;
font-family: ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,liberation mono,courier new,monospace;
font-feature-settings: normal;
font-variation-settings: normal;
color: #fff;
}
code.xref.py.py-func.docutils.literal.notranslate span.pre {
font-size: 16px;
}
.sig span.pre, .sig span.viewcode-link {
color: #000000;
font-family: ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,liberation mono,courier new,monospace;
font-feature-settings: normal;
font-variation-settings: normal;
text-decoration: none;
}
@media screen and (max-width: 550px) {
.sig span.pre, .sig span.viewcode-link {
font-family: 'Open Sans', sans-serif;
}
}
.sig span.sig-paren {
color: #000000;
font-family: ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,liberation mono,courier new,monospace;
font-feature-settings: normal;
font-variation-settings: normal;
}
.rst-content h1 {
color: #fff;
}
.rst-content h2 {
color: #fff;
}
.rst-content h3 {
color: #fff;
}
.admonition.note, .admonition.todo {
background-color: #f0f0f0;
}
#indices-and-tables h1 {
color: #f0f0f0;
font-family: 'Open Sans', sans-serif;
font-weight: 300;
}
#indices-and-tables .std {
color: #03A9F4;
}
.admonition-todo.admonition {
background: #1c1c1c;
color: #e3a72c;
border-radius: 0.5rem;
border: 1px solid #e3a72c;
padding: 1.5rem;
}
.admonition-todo.admonition .admonition-title {
background-color: transparent;
}
.admonition-todo.admonition .admonition-title::before {
color: #e3a72c;
}
.admonition.note {
background: #1c1c1c;
color: #1b83e2;
border-radius: 0.5rem;
border: 1px solid #1b83e2;
padding: 1.5rem;
}
.admonition.note .admonition-title {
background-color: transparent;
}
.admonition.note .admonition-title::before {
color: #1b83e2;
}
.admonition.tip {
background: #1c1c1c;
color: #40d294;
border-radius: 0.5rem;
border: 1px solid #40d294;
padding: 1.5rem;
}
.admonition.tip .admonition-title {
background-color: transparent;
}
.admonition.tip .admonition-title::before {
color: #40d294;
}
.fa-arrow-circle-right, .fa-arrow-circle-left{
color: #1c1c1c;
}
.rst-content div[class^=highlight] {
border: none;
}
.highlight {
border: 1px solid #e3a72c;
border-radius: 0.5rem;
--color-prettylights-syntax-variable: #ffa657;
font-family: ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,liberation mono,courier new,monospace;
overflow-x: auto;
}
.highlight pre {
color:#f8f8f2;
background-color:#272822;
-moz-tab-size:4;
-o-tab-size:4;
tab-size:4;
--color-prettylights-syntax-variable: #ffa657;
font-family: ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,liberation mono,courier new,monospace;
}
.highlight pre span {
font-family: ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,liberation mono,courier new,monospace;
font-weight: normal;
}
.highlight pre span.o {
color: #f92672;
font-weight: normal;
}
.highlight pre span.kn {
color: #f92672;
font-weight: normal;
}
.highlight pre span.s2 {
color: #E6DB74;
font-weight: normal;
}
.highlight pre span.c1 {
color: #75715E;
font-weight: normal;
}
.highlight pre span.mi {
color: #AE81FF;
font-weight: normal;
}
.highlight pre span.nn {
color: #f0f0f0;
font-weight: normal;
}
.highlight .k {
color: #66d9ef;
font-weight: normal;
}
.highlight .sd {
color: #e6db74;
font-weight: normal;
}
.highlight .nc, .highlight .fm, .highlight .nd, .highlight .nf, .highlight .ne{
color: #a6e22e;
font-weight: normal;
}
.highlight .ow {
color: #f92672;
font-weight: normal;
}
.highlight .kc {
color: #66d9ef;
font-weight: normal;
}
.highlight .nb, .highlight .bp, .highlight .vm {
color: #f0f0f0;
font-weight: normal;
}
.highlight .go {
color: #f8f8f2;
}
.highlight .gp {
color: #AE81FF;
}
.highlight .s1 {
color: #AE81FF;
}
html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt {
display: block;
}
html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt, html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt {
border-top: 2px solid #03A9F4;
border-radius: 2px;
background: #414040;
color: #03A9F4;
}
html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt {
border-top: none;
}
html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt span.pre, .sig span.sig-paren {
color: #fff;
}
html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink {
color: #fff;
}
* span.pre {
color: #03A9F4;
}
.wy-menu span.pre {
color: #f0f0f0;
}
</style>
{% endblock %}

68
docs/source/conf.py Normal file
View File

@ -0,0 +1,68 @@
# Configuration file for the Sphinx documentation builder.
import os
import sys
# Make the repository root importable so autodoc can find ria_toolkit_oss
# (conf.py lives in docs/source, so the root is two levels up).
sys.path.insert(0, os.path.abspath(os.path.join('..', '..')))
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
project = 'ria-toolkit-oss'
copyright = '2025, Qoherent Inc'
author = 'Qoherent Inc.'
release = '0.1.0'
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
extensions = [
'sphinx.ext.todo',
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.intersphinx'
]
# Keep type hints out of rendered signatures; show short (unqualified) names.
autodoc_typehints = "none"
autodoc_typehints_format = "short"
python_use_unqualified_type_names = True
# Render .. todo:: directives in the built docs.
todo_include_todos = True
# NOTE(review): Sphinx convention is '_templates' — confirm the dotted
# '.templates' directory name is intentional.
templates_path = ['.templates']
exclude_patterns = []
# SDR/hardware driver modules that aren't installable in a docs build;
# autodoc stubs them out instead of importing them.
autodoc_mock_imports = ['uhd', 'adi', 'iio', 'rtlsdr']
autodoc_default_options = {
'members': True,
'special-members': '__call__'
}
# Link intersphinx against the docs for the interpreter version building the docs.
version_link = f"{sys.version_info.major}.{sys.version_info.minor}"
intersphinx_mapping = {'python': (f'https://docs.python.org/{version_link}', None),
'numpy': ('https://numpy.org/doc/stable', None),
'scipy': ('https://docs.scipy.org/doc/scipy', None),
'matplotlib': ('https://matplotlib.org/stable', None)}
def autodoc_process_docstring(app, what, name, obj, options, lines):
    """Expand the ``np.`` shorthand to ``numpy.`` in collected docstrings.

    Connected to Sphinx's ``autodoc-process-docstring`` event; rewriting the
    prefix lets intersphinx resolve ``numpy`` cross-references. ``lines`` must
    be mutated in place for Sphinx to pick up the changes, so we assign back
    by index rather than rebinding the list.

    NOTE(review): a plain substring replace also rewrites "np." occurrences
    embedded in other words or prose — acceptable for these docstrings, but
    worth keeping in mind.
    """
    for i, line in enumerate(lines):
        lines[i] = line.replace("np.", "numpy.")
autodoc_member_order = 'bysource'
def setup(app):
    """Sphinx extension hook: register the docstring post-processor so it
    runs on every docstring autodoc collects."""
    app.connect("autodoc-process-docstring", autodoc_process_docstring)
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
html_theme = 'sphinx_rtd_theme'

19
docs/source/index.rst Normal file
View File

@ -0,0 +1,19 @@
RIA Toolkit OSS Documentation
=============================
.. toctree::
:maxdepth: 2
Introduction <intro/index>
Datatypes Package <ria_toolkit_oss/datatypes/ria_toolkit_oss.datatypes>
IO Package <ria_toolkit_oss/ria_toolkit_oss.io>
Transforms Package <ria_toolkit_oss/ria_toolkit_oss.transforms>
Utils Package <ria_toolkit_oss/ria_toolkit_oss.utils>
Viz Package <ria_toolkit_oss/ria_toolkit_oss.viz>
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

View File

@ -0,0 +1,7 @@
Contribution
============
Contributions are always welcome! Whether it's an enhancement, bug fix, or new usage examples, your input is valuable.
If you're interested in contributing to RIA Toolkit OSS, please don't hesitate to reach out to the project maintainers.
The project guidelines are outlined in the project ``README.md`` file.

View File

@ -0,0 +1,4 @@
Getting Started
===============
RIA Toolkit OSS is a Python library.

View File

@ -0,0 +1,15 @@
.. _intro:
############
Introduction
############
RIA Toolkit OSS is the open-source version of the RIA Toolkit, providing the fundamental components to help engineers and researchers get started building, testing, and deploying radio intelligence applications.
.. toctree::
:maxdepth: 2
:caption: Contents:
Installation <installation>
Getting Started <getting_started>
Contribution <contribution>

View File

@ -0,0 +1,87 @@
Installation
============
Installation from Conda Forge (recommended)
-------------------------------------------
It is recommended to use RIA Toolkit OSS within a `Radioconda <https://anaconda.org/ryanvolz/radioconda>`_ environment,
which provides pre-configured packages and libraries required for common SDR devices. Radioconda installation and setup instructions
can be found in the project README: `radioconda-installer <https://github.com/radioconda/radioconda-installer>`_.
Install RIA Toolkit OSS into the base environment:
.. code-block:: bash
conda activate base
conda install ria-toolkit-oss
.. note::
**(Coming soon)** Install RIA Toolkit OSS from RIA Hub's Conda Package Registry.
Installation from PyPI / RIA Hub
--------------------------------
You can also install RIA Toolkit OSS in a standard Python virtual environment using Pip.
Additional information on Python virtual environments can be found here: `W3Schools: Python Virtual Environment <https://www.w3schools.com/python/python_virtualenv.asp>`_.
1. Create and activate a Python virtual environment:
On Linux/macOS:
.. code-block:: bash
python3 -m venv venv
source venv/bin/activate
On Windows (Command Prompt):
.. code-block:: doscon
python -m venv venv
venv\Scripts\activate
2. Install RIA Toolkit OSS with Pip:
.. code-block:: bash
pip install ria-toolkit-oss
RIA Toolkit OSS can also be installed from the RIA Hub Python Index. However, because RIA Hub does not yet support a proxy or cache for public packages, you need to use the ``--no-deps`` option with pip to skip automatic dependency installation, and then manually install each dependency afterward.
.. code-block:: bash
pip install --index-url https://riahub.ai/api/packages/qoherent/pypi/simple/ ria-toolkit-oss --no-deps
Installation from source
------------------------
You can also install RIA Toolkit OSS directly from the source code:
1. Clone the repository. For example:
.. code-block:: bash
git clone https://riahub.ai/qoherent/ria-toolkit-oss.git
2. Navigate into the project directory:
.. code-block:: bash
cd ria-toolkit-oss
3. Install the package:
.. code-block:: bash
pip install .
.. note::
If you plan to modify the project code and want changes to take effect immediately without reinstalling, you can install the project in editable mode:
.. code-block:: bash
pip install -e .

View File

@ -0,0 +1,311 @@
.. _radio_datasets:
Intro to radio datasets
=======================
In RIA, radio datasets are iterable datasets, designed specifically for machine learning applications in radio signal
processing and analysis. The individual examples are stored alongside the corresponding metadata in high-performance
HDF5 files, referred to as dataset source files.
The Radio Dataset Framework provides a software interface to access and manipulate these source files. This eliminates
the need for users to interface with the source files directly. Instead, users initialize and interact with a Python
object, while the complexities of efficient data retrieval and source file manipulation are managed behind the scenes.
Utils includes an abstract class called :py:obj:`ria_toolkit_oss.datatypes.datasets.RadioDataset`, which defines common properties and
behaviors for all radio datasets. :py:obj:`ria_toolkit_oss.datatypes.datasets.RadioDataset` can be considered a blueprint for all
other radio dataset classes. This class is then subclassed to define more specific blueprints for different types
of radio datasets. For example, :py:obj:`ria_toolkit_oss.datatypes.datasets.IQDataset`, which is tailored for machine learning tasks
involving the processing of signals represented as IQ (In-phase and Quadrature) samples.
Then, in the various project backends, there are concrete dataset classes, which inherit from both Utils and the base
dataset class from the respective backend. For example, the :py:obj:`TorchIQDataset` class extends both
:py:obj:`ria_toolkit_oss.datatypes.datasets.IQDataset` from Utils and :py:obj:`torch.utils.data.IterableDataset` from
PyTorch, providing a concrete dataset class tailored for IQ datasets and optimized for the PyTorch backend.
Dataset initialization
----------------------
There are three ways to initialize a radio dataset:
1. Use an RIA dataset builder to download and prepare an off-the-shelf dataset.
2. Use the RIA Curator to curate a dataset from a collection of recordings.
3. Initialize a dataset from a source file.
Off-the-shelf datasets
~~~~~~~~~~~~~~~~~~~~~~
Qoherent provides a wide selection of off-the-shelf machine learning datasets for radio. These can be downloaded and
initialized using the corresponding dataset builders. For example, we can initialize a new instance of Qoherent's AWGN
Modulation dataset using :py:obj:`AWGN_Builder`:
>>> from ria.dataset_manager.builders import AWGN_Builder
>>> awgn_builder = AWGN_Builder()
>>> awgn_builder.download_and_prepare()
>>> awgn_ds = awgn_builder.as_dataset(backend="pytorch")
Because we specified ``backend="pytorch"``, we got back a ``TorchIQDataset``, which is compatible with the PyTorch
framework for machine learning.
>>> awgn_ds.__class__.__name__
'TorchIQDataset'
If we specify an alternative backend, we will get a different class. For example:
>>> awgn_dataset_tf = awgn_builder.as_dataset(backend="tensorflow")
>>> awgn_dataset_tf.__class__.__name__
'TensorFlowIQDataset'
However, both datasets are radio datasets. And, in the case of the AWGN Modulation dataset, both are IQ datasets.
>>> isinstance(awgn_ds, RadioDataset)
True
>>> isinstance(awgn_ds, IQDataset)
True
Dataset curation
~~~~~~~~~~~~~~~~
A second way to initialize a dataset is by curating it from a collection or folder of radio recordings. For example:
>>> from ria.dataset_manager.curator import Curator, SimpleSlicer, RMSQualifier
>>> slicer = SimpleSlicer()
>>> qualifier = RMSQualifier()
>>> curator = Curator(slicer=slicer, qualifier=qualifier)
>>> ds = curator.curate("path/to/folder/of/recording/files", backend="pytorch")
Please refer to the Curator Package for more information regarding dataset curation.
Initializing from source
~~~~~~~~~~~~~~~~~~~~~~~~
The third way to initialize a dataset is directly from the source file. For example, I can initialize a
``TensorFlowIQDataset`` using the source file curated above:
>>> from ria.tensorflow_backend.datasets import TensorFlowIQDataset
>>> ds_tf = TensorFlowIQDataset(source=ds.source)
Notice that ``ds`` and ``ds_tf`` are equal, and any inplace operations performed on one will affect the state of the
other:
>>> ds == ds_tf
True
This underscores a key point: There are no backend-specific details in the dataset files themselves. Instead, support
for different backends is provided through the software interface.
Dataset usage
-------------
Datasets from the PyTorch backend are just that, PyTorch datasets. They are substitutable for any other PyTorch
dataset, and used just the same. For example, to initialize a dataloader:
>>> from ria.dataset_manager.builders import AWGN_Builder
>>> from torch.utils.data import DataLoader
>>> builder = AWGN_Builder()
>>> builder.download_and_prepare()
>>> ds = builder.as_dataset(backend="pytorch")
>>> dl = DataLoader(ds, batch_size=32, shuffle=True)
Similarly, datasets in the TensorFlow backend are just that, TensorFlow datasets. They are substitutable for any other
TensorFlow dataset, and used just the same. For example:
>>> from ria.dataset_manager.builders import AWGN_Builder
>>> from torch.utils.data import DataLoader
>>> builder = AWGN_Builder()
>>> builder.download_and_prepare()
>>> ds = builder.as_dataset(backend="tensorflow")
>>> ds = ds.shuffle(buffer_size=1000).batch(batch_size=32)
All datasets contain a table of metadata, which can be accessed through the ``metadata`` property:
>>> md = ds.metadata
At any index in the dataset, the metadata will always correspond to the data at that same index. Metadata labels can
be viewed using the ``labels`` property:
>>> ds.labels
['rec_id', 'snr', 'modulation']
Dataset processing and manipulation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
All radio datasets support methods tailored specifically for radio processing. These methods are backend-independent,
inherited from the blueprints in Utils like :py:obj:`ria_toolkit_oss.datatypes.datasets.RadioDataset`.
For example, we can trim down the length of the examples from 1,024 to 512 samples, and then augment the dataset:
>>> ds_trimmed = ds.trim_examples(trim_length=512)
>>> ds_augmented = ds_trimmed.augment(level=1.0)
.. note:: Because no augmentations were specified, ``augment()`` applied the default IQ augmentations returned by the
:py:obj:`IQDataset.default_augmentations()` method.
The dataset state is managed within the source file, rather than in memory. This allows us to process and
train on very large datasets, even if they exceed available memory capacity.
Each operation creates and returns a new dataset object initialized from a new source file. Using the builder, the
AWGN Modulation source file was downloaded as ``modulation_awgn.hdf5`` source file, whose state is accessed with
``ds``. Then, trimming the examples in the dataset created a new source file called ``modulation_awgn.001.hdf5``,
whose state is accessed with ``ds_trimmed``. Augmentation then generated a third file called
``modulation_awgn.002.hdf5``. This is important to keep in mind, especially when working with large datasets.
Optionally, we could have performed these preprocessing augmentations inplace by specifying ``inplace=True``.
While inplace operations are more memory efficient, they can lead to
`aliasing <https://www.teach.cs.toronto.edu/~csc110y/fall/notes/06-memory-model/05-aliasing.html>`_. Therefore, we
recommend limiting the use of inplace operations to memory-limited applications where you know what you're doing.
All dataset processing and manipulation operations operate on the metadata too. As a result, at any index in
the dataset, the metadata will always accurately reflect the data at that same index.
Slicing, indexing, and filtering
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Radio datasets support indexing, slicing, and filtering using Python's idiomatic square bracket syntax. For example:
>>> example = ds[2] # Retrieve the example at index 2.
>>> ds_subset = ds[10:20] # Retrieve a slice of the dataset from index 10 to 20.
>>> ds_filtered = ds[ds.metadata['snr'] > 3] # Retrieve all examples where the SNR is greater than 3 dB
Notice that ``example`` is a NumPy array, while both ``ds_subset`` and ``ds_filtered`` are dataset objects.
To read the whole dataset into memory as a NumPy array, use the ``data`` property:
>>> arr = ds_filtered.data # Equivalent to arr = ds_filtered[:]
Dataset iterators
~~~~~~~~~~~~~~~~~
You can iterate over radio datasets manually, just like with a list:
>>> for i, example in enumerate(ds):
... print(f"Example at index {i}:")
... print(example)
Source files
------------
Dataset source files are high-performance HDF5 files that contain both data and metadata in a single self-descriptive
file.
Source file format
~~~~~~~~~~~~~~~~~~
While average users need not concern themselves with the source file format, those creating their own source files will
need to familiarize themselves with the expected format. As a first step, we recommend familiarizing yourself with
some `core concepts <https://docs.h5py.org/en/stable/quick.html#core-concepts>`_ pertaining to the HDF5 file format.
.. note::
If you're having trouble converting your radio dataset into a source file compatible with the Radio Dataset
Framework, please let us know. We'd be happy to assist.
Here is the source file format:
.. code-block:: text
root/
├── data (Dataset)
│ ├── [Full dataset license (Attribute)]
│ └── [dataset examples, to use as input to the model]
└── metadata (Group)
├── metadata (Dataset)
│ ├── rec_id (Column)
│ ├── sample_rate (Column)
│ └── ...
└── about (Dataset)
├── author
├── name
└── ...
Additional datasets can be added at the root level as required. For example, some datasets---such as the MathWork's
Spectrum Sensing dataset---contain a separate dataset at the root level for the pixel masks. Should these extra datasets
exist, they need to be the same shape as the primary dataset.
.. code-block:: text
root/
├── data (Dataset)
│ ├── [Full dataset license (Attribute)]
│ └── [spectrogram images, to use as input to the model]
├── masks (Dataset)
│ └── [target masks, to use for training]
└── metadata (Group)
├── metadata (Dataset)
│ ├── rec_id (Column)
│ ├── sample_rate (Column)
│ └── ...
└── about (Dataset)
├── author
├── name
└── ...
Data format
~~~~~~~~~~~
IQ data
^^^^^^^
IQ data is stored as complex numbers where each data point is a complex value combining
in-phase (I) and quadrature (Q) components. The precision depends on the application, but often each component (real
and imaginary) is stored as a 32-bit floating-point number.
IQ data is stored as a 3-dimensional array with the shape ``M x C x N``:
- ``M``: Represents the number of examples in the dataset.
- ``C``: Indicates the number of radio channels.
- ``N``: Denotes the length of each signal, which is the number of data points in each example.
Spectrogram data
^^^^^^^^^^^^^^^^
Spectrogram data is stored as real numbers, the exact format of which depends on the image format.
Spectrogram data is stored as a 4-dimensional array with the shape ``M x C x H x W``:
- ``M``: Represents the number of examples in the dataset.
- ``C``: Indicates the number of image channels.
- ``H x W``: Denotes the height and width of the spectrogram images, respectively.
Reading source files
~~~~~~~~~~~~~~~~~~~~
Here's an example of how to read these source files in pure Python, using the
`h5py <https://docs.h5py.org/en/stable/index.html>`_ library:
.. code-block:: python
import h5py
with h5py.File(dataset_file, "r") as f:
data = f['data']
print(f"Length of the dataset: {len(data)}")
print("Keys in metadata/about:")
for attr_name, attr_value in f['metadata/about'].attrs.items():
print(f"{attr_name}: {attr_value}")
print("Keys in metadata/metadata:")
for attr_name, attr_value in f['metadata/metadata'].attrs.items():
print(f"{attr_name}: {attr_value}")
To load in the data as a numpy array and the metadata as a pandas DataFrame:
.. code-block:: python
import h5py
import pandas as pd
with h5py.File(dataset_file, "r") as f:
data = f['data'][:]
metadata = pd.DataFrame(f["metadata/metadata"][:])
.. note:: It is generally inadvisable to read the entire dataset into memory.

View File

@ -0,0 +1,7 @@
Dataset License SubModule
=========================
.. automodule:: ria_toolkit_oss.datatypes.datasets.license
:members:
:undoc-members:
:show-inheritance:

View File

@ -0,0 +1,37 @@
Datatypes Package (ria_toolkit_oss.datatypes)
=============================================
.. |br| raw:: html
<br />
.. automodule:: ria_toolkit_oss.datatypes
Package Contents
----------------
.. automodule:: ria_toolkit_oss.datatypes.annotation
:members:
:undoc-members:
:show-inheritance:
.. _rec-label:
.. automodule:: ria_toolkit_oss.datatypes.recording
:members:
:undoc-members:
:show-inheritance:
Radio Dataset SubPackage
------------------------
.. automodule:: ria_toolkit_oss.datatypes.datasets
:members:
:undoc-members:
:show-inheritance:
.. toctree::
:maxdepth: 2
Dataset License SubModule <ria_toolkit_oss.datatypes.datasets.license>
Radio Datasets <radio_datasets>

View File

@ -0,0 +1,4 @@
IO Package (ria_toolkit_oss.io)
===============================
.. automodule:: ria_toolkit_oss.io

View File

@ -0,0 +1,20 @@
Transforms (ria_toolkit_oss.transforms)
=======================================
.. automodule:: ria_toolkit_oss.transforms
IQ Impairments
--------------
.. automodule:: ria_toolkit_oss.transforms.iq_impairments
:members:
:undoc-members:
:show-inheritance:
IQ Augmentations
----------------
.. automodule:: ria_toolkit_oss.transforms.iq_augmentations
:members:
:undoc-members:
:show-inheritance:

View File

@ -0,0 +1,19 @@
Utils Package (ria_toolkit_oss.utils)
=====================================
Package Contents
----------------
.. automodule:: ria_toolkit_oss.utils
:members:
:undoc-members:
:show-inheritance:
Array Conversion SubModule
--------------------------
.. automodule:: ria_toolkit_oss.utils.array_conversion
:members:
:undoc-members:
:show-inheritance:

View File

@ -0,0 +1,12 @@
Viz Package (ria_toolkit_oss.viz)
=================================
.. automodule:: ria_toolkit_oss.viz
Recording
---------
.. automodule:: ria_toolkit_oss.viz.recording
:members:
:undoc-members:
:show-inheritance:

571
poetry.lock generated
View File

@ -48,6 +48,26 @@ files = [
[package.dependencies]
typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""}
[[package]]
name = "attrs"
version = "25.3.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"},
{file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"},
]
[package.extras]
benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]
tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
[[package]]
name = "babel"
version = "2.17.0"
@ -363,6 +383,45 @@ files = [
{file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
]
[[package]]
name = "h5py"
version = "3.14.0"
description = "Read and write HDF5 files from Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "h5py-3.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:24df6b2622f426857bda88683b16630014588a0e4155cba44e872eb011c4eaed"},
{file = "h5py-3.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ff2389961ee5872de697054dd5a033b04284afc3fb52dc51d94561ece2c10c6"},
{file = "h5py-3.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:016e89d3be4c44f8d5e115fab60548e518ecd9efe9fa5c5324505a90773e6f03"},
{file = "h5py-3.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1223b902ef0b5d90bcc8a4778218d6d6cd0f5561861611eda59fa6c52b922f4d"},
{file = "h5py-3.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:852b81f71df4bb9e27d407b43071d1da330d6a7094a588efa50ef02553fa7ce4"},
{file = "h5py-3.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f30dbc58f2a0efeec6c8836c97f6c94afd769023f44e2bb0ed7b17a16ec46088"},
{file = "h5py-3.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:543877d7f3d8f8a9828ed5df6a0b78ca3d8846244b9702e99ed0d53610b583a8"},
{file = "h5py-3.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c497600c0496548810047257e36360ff551df8b59156d3a4181072eed47d8ad"},
{file = "h5py-3.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:723a40ee6505bd354bfd26385f2dae7bbfa87655f4e61bab175a49d72ebfc06b"},
{file = "h5py-3.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:d2744b520440a996f2dae97f901caa8a953afc055db4673a993f2d87d7f38713"},
{file = "h5py-3.14.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e0045115d83272090b0717c555a31398c2c089b87d212ceba800d3dc5d952e23"},
{file = "h5py-3.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6da62509b7e1d71a7d110478aa25d245dd32c8d9a1daee9d2a42dba8717b047a"},
{file = "h5py-3.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554ef0ced3571366d4d383427c00c966c360e178b5fb5ee5bb31a435c424db0c"},
{file = "h5py-3.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cbd41f4e3761f150aa5b662df991868ca533872c95467216f2bec5fcad84882"},
{file = "h5py-3.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:bf4897d67e613ecf5bdfbdab39a1158a64df105827da70ea1d90243d796d367f"},
{file = "h5py-3.14.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:aa4b7bbce683379b7bf80aaba68e17e23396100336a8d500206520052be2f812"},
{file = "h5py-3.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9603a501a04fcd0ba28dd8f0995303d26a77a980a1f9474b3417543d4c6174"},
{file = "h5py-3.14.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8cbaf6910fa3983c46172666b0b8da7b7bd90d764399ca983236f2400436eeb"},
{file = "h5py-3.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d90e6445ab7c146d7f7981b11895d70bc1dd91278a4f9f9028bc0c95e4a53f13"},
{file = "h5py-3.14.0-cp313-cp313-win_amd64.whl", hash = "sha256:ae18e3de237a7a830adb76aaa68ad438d85fe6e19e0d99944a3ce46b772c69b3"},
{file = "h5py-3.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5cc1601e78027cedfec6dd50efb4802f018551754191aeb58d948bd3ec3bd7a"},
{file = "h5py-3.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e59d2136a8b302afd25acdf7a89b634e0eb7c66b1a211ef2d0457853768a2ef"},
{file = "h5py-3.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:573c33ad056ac7c1ab6d567b6db9df3ffc401045e3f605736218f96c1e0490c6"},
{file = "h5py-3.14.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccbe17dc187c0c64178f1a10aa274ed3a57d055117588942b8a08793cc448216"},
{file = "h5py-3.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f025cf30ae738c4c4e38c7439a761a71ccfcce04c2b87b2a2ac64e8c5171d43"},
{file = "h5py-3.14.0.tar.gz", hash = "sha256:2372116b2e0d5d3e5e705b7f663f7c8d96fa79a4052d250484ef91d24d6a08f4"},
]
[package.dependencies]
numpy = ">=1.19.3"
[[package]]
name = "idna"
version = "3.10"
@ -435,6 +494,43 @@ MarkupSafe = ">=2.0"
[package.extras]
i18n = ["Babel (>=2.7)"]
[[package]]
name = "jsonschema"
version = "4.25.1"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63"},
{file = "jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85"},
]
[package.dependencies]
attrs = ">=22.2.0"
jsonschema-specifications = ">=2023.03.6"
referencing = ">=0.28.4"
rpds-py = ">=0.7.1"
[package.extras]
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"]
[[package]]
name = "jsonschema-specifications"
version = "2025.4.1"
description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af"},
{file = "jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608"},
]
[package.dependencies]
referencing = ">=0.31.0"
[[package]]
name = "markupsafe"
version = "3.0.2"
@ -530,6 +626,31 @@ files = [
{file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"},
]
[[package]]
name = "narwhals"
version = "2.3.0"
description = "Extremely lightweight compatibility layer between dataframe libraries"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "narwhals-2.3.0-py3-none-any.whl", hash = "sha256:5507b1a9a9c2b1c55a627fdf6cf722fef2e23498bd14362a332c8848a311c321"},
{file = "narwhals-2.3.0.tar.gz", hash = "sha256:b66bc4ab7b6746354f60c4b3941e3ce60c066588c35360e2dc6c063489000a16"},
]
[package.extras]
cudf = ["cudf (>=24.10.0)"]
dask = ["dask[dataframe] (>=2024.8)"]
duckdb = ["duckdb (>=1.0)"]
ibis = ["ibis-framework (>=6.0.0)", "packaging", "pyarrow-hotfix", "rich"]
modin = ["modin"]
pandas = ["pandas (>=1.1.3)"]
polars = ["polars (>=0.20.4)"]
pyarrow = ["pyarrow (>=13.0.0)"]
pyspark = ["pyspark (>=3.5.0)"]
pyspark-connect = ["pyspark[connect] (>=3.5.0)"]
sqlframe = ["sqlframe (>=3.22.0,!=3.39.3)"]
[[package]]
name = "numpy"
version = "1.26.4"
@ -582,12 +703,99 @@ version = "25.0"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["dev", "docs", "test"]
groups = ["main", "dev", "docs", "test"]
files = [
{file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"},
{file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
]
[[package]]
name = "pandas"
version = "2.3.2"
description = "Powerful data structures for data analysis, time series, and statistics"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pandas-2.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52bc29a946304c360561974c6542d1dd628ddafa69134a7131fdfd6a5d7a1a35"},
{file = "pandas-2.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:220cc5c35ffaa764dd5bb17cf42df283b5cb7fdf49e10a7b053a06c9cb48ee2b"},
{file = "pandas-2.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c05e15111221384019897df20c6fe893b2f697d03c811ee67ec9e0bb5a3424"},
{file = "pandas-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc03acc273c5515ab69f898df99d9d4f12c4d70dbfc24c3acc6203751d0804cf"},
{file = "pandas-2.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d25c20a03e8870f6339bcf67281b946bd20b86f1a544ebbebb87e66a8d642cba"},
{file = "pandas-2.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21bb612d148bb5860b7eb2c10faacf1a810799245afd342cf297d7551513fbb6"},
{file = "pandas-2.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:b62d586eb25cb8cb70a5746a378fc3194cb7f11ea77170d59f889f5dfe3cec7a"},
{file = "pandas-2.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1333e9c299adcbb68ee89a9bb568fc3f20f9cbb419f1dd5225071e6cddb2a743"},
{file = "pandas-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:76972bcbd7de8e91ad5f0ca884a9f2c477a2125354af624e022c49e5bd0dfff4"},
{file = "pandas-2.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b98bdd7c456a05eef7cd21fd6b29e3ca243591fe531c62be94a2cc987efb5ac2"},
{file = "pandas-2.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d81573b3f7db40d020983f78721e9bfc425f411e616ef019a10ebf597aedb2e"},
{file = "pandas-2.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e190b738675a73b581736cc8ec71ae113d6c3768d0bd18bffa5b9a0927b0b6ea"},
{file = "pandas-2.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c253828cb08f47488d60f43c5fc95114c771bbfff085da54bfc79cb4f9e3a372"},
{file = "pandas-2.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:9467697b8083f9667b212633ad6aa4ab32436dcbaf4cd57325debb0ddef2012f"},
{file = "pandas-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fbb977f802156e7a3f829e9d1d5398f6192375a3e2d1a9ee0803e35fe70a2b9"},
{file = "pandas-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b9b52693123dd234b7c985c68b709b0b009f4521000d0525f2b95c22f15944b"},
{file = "pandas-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bd281310d4f412733f319a5bc552f86d62cddc5f51d2e392c8787335c994175"},
{file = "pandas-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96d31a6b4354e3b9b8a2c848af75d31da390657e3ac6f30c05c82068b9ed79b9"},
{file = "pandas-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:df4df0b9d02bb873a106971bb85d448378ef14b86ba96f035f50bbd3688456b4"},
{file = "pandas-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:213a5adf93d020b74327cb2c1b842884dbdd37f895f42dcc2f09d451d949f811"},
{file = "pandas-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c13b81a9347eb8c7548f53fd9a4f08d4dfe996836543f805c987bafa03317ae"},
{file = "pandas-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0c6ecbac99a354a051ef21c5307601093cb9e0f4b1855984a084bfec9302699e"},
{file = "pandas-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6f048aa0fd080d6a06cc7e7537c09b53be6642d330ac6f54a600c3ace857ee9"},
{file = "pandas-2.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0064187b80a5be6f2f9c9d6bdde29372468751dfa89f4211a3c5871854cfbf7a"},
{file = "pandas-2.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac8c320bded4718b298281339c1a50fb00a6ba78cb2a63521c39bec95b0209b"},
{file = "pandas-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:114c2fe4f4328cf98ce5716d1532f3ab79c5919f95a9cfee81d9140064a2e4d6"},
{file = "pandas-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:48fa91c4dfb3b2b9bfdb5c24cd3567575f4e13f9636810462ffed8925352be5a"},
{file = "pandas-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:12d039facec710f7ba305786837d0225a3444af7bbd9c15c32ca2d40d157ed8b"},
{file = "pandas-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c624b615ce97864eb588779ed4046186f967374185c047070545253a52ab2d57"},
{file = "pandas-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0cee69d583b9b128823d9514171cabb6861e09409af805b54459bd0c821a35c2"},
{file = "pandas-2.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2319656ed81124982900b4c37f0e0c58c015af9a7bbc62342ba5ad07ace82ba9"},
{file = "pandas-2.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b37205ad6f00d52f16b6d09f406434ba928c1a1966e2771006a9033c736d30d2"},
{file = "pandas-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:837248b4fc3a9b83b9c6214699a13f069dc13510a6a6d7f9ba33145d2841a012"},
{file = "pandas-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d2c3554bd31b731cd6490d94a28f3abb8dd770634a9e06eb6d2911b9827db370"},
{file = "pandas-2.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88080a0ff8a55eac9c84e3ff3c7665b3b5476c6fbc484775ca1910ce1c3e0b87"},
{file = "pandas-2.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d4a558c7620340a0931828d8065688b3cc5b4c8eb674bcaf33d18ff4a6870b4a"},
{file = "pandas-2.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45178cf09d1858a1509dc73ec261bf5b25a625a389b65be2e47b559905f0ab6a"},
{file = "pandas-2.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77cefe00e1b210f9c76c697fedd8fdb8d3dd86563e9c8adc9fa72b90f5e9e4c2"},
{file = "pandas-2.3.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:13bd629c653856f00c53dc495191baa59bcafbbf54860a46ecc50d3a88421a96"},
{file = "pandas-2.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:36d627906fd44b5fd63c943264e11e96e923f8de77d6016dc2f667b9ad193438"},
{file = "pandas-2.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:a9d7ec92d71a420185dec44909c32e9a362248c4ae2238234b76d5be37f208cc"},
{file = "pandas-2.3.2.tar.gz", hash = "sha256:ab7b58f8f82706890924ccdfb5f48002b83d2b5a3845976a9fb705d36c34dcdb"},
]
[package.dependencies]
numpy = [
{version = ">=1.22.4", markers = "python_version < \"3.11\""},
{version = ">=1.23.2", markers = "python_version == \"3.11\""},
{version = ">=1.26.0", markers = "python_version >= \"3.12\""},
]
python-dateutil = ">=2.8.2"
pytz = ">=2020.1"
tzdata = ">=2022.7"
[package.extras]
all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"]
aws = ["s3fs (>=2022.11.0)"]
clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"]
compression = ["zstandard (>=0.19.0)"]
computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"]
consortium-standard = ["dataframe-api-compat (>=0.1.7)"]
excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"]
feather = ["pyarrow (>=10.0.1)"]
fss = ["fsspec (>=2022.11.0)"]
gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"]
hdf5 = ["tables (>=3.8.0)"]
html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"]
mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"]
output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"]
parquet = ["pyarrow (>=10.0.1)"]
performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"]
plot = ["matplotlib (>=3.6.3)"]
postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"]
pyarrow = ["pyarrow (>=10.0.1)"]
spss = ["pyreadstat (>=1.2.0)"]
sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"]
test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
xml = ["lxml (>=4.9.2)"]
[[package]]
name = "pathspec"
version = "0.12.1"
@ -617,6 +825,30 @@ docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-a
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"]
type = ["mypy (>=1.14.1)"]
[[package]]
name = "plotly"
version = "6.3.0"
description = "An open-source interactive data visualization library for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "plotly-6.3.0-py3-none-any.whl", hash = "sha256:7ad806edce9d3cdd882eaebaf97c0c9e252043ed1ed3d382c3e3520ec07806d4"},
{file = "plotly-6.3.0.tar.gz", hash = "sha256:8840a184d18ccae0f9189c2b9a2943923fd5cae7717b723f36eef78f444e5a73"},
]
[package.dependencies]
narwhals = ">=1.15.1"
packaging = "*"
[package.extras]
dev = ["plotly[dev-optional]"]
dev-build = ["build", "jupyter", "plotly[dev-core]"]
dev-core = ["pytest", "requests", "ruff (==0.11.12)"]
dev-optional = ["anywidget", "colorcet", "fiona (<=1.9.6) ; python_version <= \"3.8\"", "geopandas", "inflect", "numpy", "orjson", "pandas", "pdfrw", "pillow", "plotly-geo", "plotly[dev-build]", "plotly[kaleido]", "polars[timezone]", "pyarrow", "pyshp", "pytz", "scikit-image", "scipy", "shapely", "statsmodels", "vaex ; python_version <= \"3.9\"", "xarray"]
express = ["numpy"]
kaleido = ["kaleido (>=1.0.0)"]
[[package]]
name = "pluggy"
version = "1.6.0"
@ -689,8 +921,8 @@ astroid = ">=3.3.8,<=3.4.0.dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = [
{version = ">=0.2", markers = "python_version < \"3.11\""},
{version = ">=0.3.6", markers = "python_version >= \"3.11\""},
{version = ">=0.3.7", markers = "python_version >= \"3.12\""},
{version = ">=0.3.6", markers = "python_version == \"3.11\""},
]
isort = ">=4.2.5,<5.13 || >5.13,<7"
mccabe = ">=0.6,<0.8"
@ -746,6 +978,62 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"]
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
]
[package.dependencies]
six = ">=1.5"
[[package]]
name = "pytz"
version = "2025.2"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"},
{file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"},
]
[[package]]
name = "quantiphy"
version = "2.20"
description = "physical quantities (numbers with units)"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "quantiphy-2.20-py3-none-any.whl", hash = "sha256:afdf5f9d1cc87359bd7daf19dc1cb23808eb2e264d9395b36ca6527fb4d71b3a"},
{file = "quantiphy-2.20.tar.gz", hash = "sha256:ba5375ac55c3b90077a793588dd5a88aaf81b2c3b0fc9c9359513ac39f6ed84d"},
]
[[package]]
name = "referencing"
version = "0.36.2"
description = "JSON Referencing + Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"},
{file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"},
]
[package.dependencies]
attrs = ">=22.2.0"
rpds-py = ">=0.7.0"
typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""}
[[package]]
name = "requests"
version = "2.32.5"
@ -768,6 +1056,267 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "rpds-py"
version = "0.27.1"
description = "Python bindings to Rust's persistent data structures (rpds)"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef"},
{file = "rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be"},
{file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61"},
{file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb"},
{file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657"},
{file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013"},
{file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a"},
{file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1"},
{file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10"},
{file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808"},
{file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8"},
{file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9"},
{file = "rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4"},
{file = "rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1"},
{file = "rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881"},
{file = "rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5"},
{file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e"},
{file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c"},
{file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195"},
{file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52"},
{file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed"},
{file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a"},
{file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde"},
{file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21"},
{file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9"},
{file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948"},
{file = "rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39"},
{file = "rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15"},
{file = "rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746"},
{file = "rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90"},
{file = "rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5"},
{file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e"},
{file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881"},
{file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec"},
{file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb"},
{file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5"},
{file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a"},
{file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444"},
{file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a"},
{file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1"},
{file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998"},
{file = "rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39"},
{file = "rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594"},
{file = "rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502"},
{file = "rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b"},
{file = "rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf"},
{file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83"},
{file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf"},
{file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2"},
{file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0"},
{file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418"},
{file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d"},
{file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274"},
{file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd"},
{file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2"},
{file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002"},
{file = "rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3"},
{file = "rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83"},
{file = "rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d"},
{file = "rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228"},
{file = "rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92"},
{file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2"},
{file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723"},
{file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802"},
{file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f"},
{file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2"},
{file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21"},
{file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef"},
{file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081"},
{file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd"},
{file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7"},
{file = "rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688"},
{file = "rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797"},
{file = "rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334"},
{file = "rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33"},
{file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a"},
{file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b"},
{file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7"},
{file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136"},
{file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff"},
{file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9"},
{file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60"},
{file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e"},
{file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212"},
{file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675"},
{file = "rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3"},
{file = "rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456"},
{file = "rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3"},
{file = "rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2"},
{file = "rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4"},
{file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e"},
{file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817"},
{file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec"},
{file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a"},
{file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8"},
{file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48"},
{file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb"},
{file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734"},
{file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb"},
{file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0"},
{file = "rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a"},
{file = "rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772"},
{file = "rpds_py-0.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c918c65ec2e42c2a78d19f18c553d77319119bf43aa9e2edf7fb78d624355527"},
{file = "rpds_py-0.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1fea2b1a922c47c51fd07d656324531adc787e415c8b116530a1d29c0516c62d"},
{file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbf94c58e8e0cd6b6f38d8de67acae41b3a515c26169366ab58bdca4a6883bb8"},
{file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2a8fed130ce946d5c585eddc7c8eeef0051f58ac80a8ee43bd17835c144c2cc"},
{file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:037a2361db72ee98d829bc2c5b7cc55598ae0a5e0ec1823a56ea99374cfd73c1"},
{file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5281ed1cc1d49882f9997981c88df1a22e140ab41df19071222f7e5fc4e72125"},
{file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd50659a069c15eef8aa3d64bbef0d69fd27bb4a50c9ab4f17f83a16cbf8905"},
{file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_31_riscv64.whl", hash = "sha256:c4b676c4ae3921649a15d28ed10025548e9b561ded473aa413af749503c6737e"},
{file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:079bc583a26db831a985c5257797b2b5d3affb0386e7ff886256762f82113b5e"},
{file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4e44099bd522cba71a2c6b97f68e19f40e7d85399de899d66cdb67b32d7cb786"},
{file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e202e6d4188e53c6661af813b46c37ca2c45e497fc558bacc1a7630ec2695aec"},
{file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f41f814b8eaa48768d1bb551591f6ba45f87ac76899453e8ccd41dba1289b04b"},
{file = "rpds_py-0.27.1-cp39-cp39-win32.whl", hash = "sha256:9e71f5a087ead99563c11fdaceee83ee982fd39cf67601f4fd66cb386336ee52"},
{file = "rpds_py-0.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:71108900c9c3c8590697244b9519017a400d9ba26a36c48381b3f64743a44aab"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b"},
{file = "rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6"},
{file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aa8933159edc50be265ed22b401125c9eebff3171f570258854dbce3ecd55475"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a50431bf02583e21bf273c71b89d710e7a710ad5e39c725b14e685610555926f"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78af06ddc7fe5cc0e967085a9115accee665fb912c22a3f54bad70cc65b05fe6"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70d0738ef8fee13c003b100c2fbd667ec4f133468109b3472d249231108283a3"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2f6fd8a1cea5bbe599b6e78a6e5ee08db434fc8ffea51ff201c8765679698b3"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8177002868d1426305bb5de1e138161c2ec9eb2d939be38291d7c431c4712df8"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:008b839781d6c9bf3b6a8984d1d8e56f0ec46dc56df61fd669c49b58ae800400"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:a55b9132bb1ade6c734ddd2759c8dc132aa63687d259e725221f106b83a0e485"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a46fdec0083a26415f11d5f236b79fa1291c32aaa4a17684d82f7017a1f818b1"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8a63b640a7845f2bdd232eb0d0a4a2dd939bcdd6c57e6bb134526487f3160ec5"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7e32721e5d4922deaaf963469d795d5bde6093207c52fec719bd22e5d1bedbc4"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2c426b99a068601b5f4623573df7a7c3d72e87533a2dd2253353a03e7502566c"},
{file = "rpds_py-0.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4fc9b7fe29478824361ead6e14e4f5aed570d477e06088826537e202d25fe859"},
{file = "rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8"},
]
[[package]]
name = "scipy"
version = "1.15.3"
description = "Fundamental algorithms for scientific computing in Python"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a345928c86d535060c9c2b25e71e87c39ab2f22fc96e9636bd74d1dbf9de448c"},
{file = "scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ad3432cb0f9ed87477a8d97f03b763fd1d57709f1bbde3c9369b1dff5503b253"},
{file = "scipy-1.15.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:aef683a9ae6eb00728a542b796f52a5477b78252edede72b8327a886ab63293f"},
{file = "scipy-1.15.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:1c832e1bd78dea67d5c16f786681b28dd695a8cb1fb90af2e27580d3d0967e92"},
{file = "scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:263961f658ce2165bbd7b99fa5135195c3a12d9bef045345016b8b50c315cb82"},
{file = "scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2abc762b0811e09a0d3258abee2d98e0c703eee49464ce0069590846f31d40"},
{file = "scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed7284b21a7a0c8f1b6e5977ac05396c0d008b89e05498c8b7e8f4a1423bba0e"},
{file = "scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5380741e53df2c566f4d234b100a484b420af85deb39ea35a1cc1be84ff53a5c"},
{file = "scipy-1.15.3-cp310-cp310-win_amd64.whl", hash = "sha256:9d61e97b186a57350f6d6fd72640f9e99d5a4a2b8fbf4b9ee9a841eab327dc13"},
{file = "scipy-1.15.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:993439ce220d25e3696d1b23b233dd010169b62f6456488567e830654ee37a6b"},
{file = "scipy-1.15.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:34716e281f181a02341ddeaad584205bd2fd3c242063bd3423d61ac259ca7eba"},
{file = "scipy-1.15.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3b0334816afb8b91dab859281b1b9786934392aa3d527cd847e41bb6f45bee65"},
{file = "scipy-1.15.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:6db907c7368e3092e24919b5e31c76998b0ce1684d51a90943cb0ed1b4ffd6c1"},
{file = "scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:721d6b4ef5dc82ca8968c25b111e307083d7ca9091bc38163fb89243e85e3889"},
{file = "scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39cb9c62e471b1bb3750066ecc3a3f3052b37751c7c3dfd0fd7e48900ed52982"},
{file = "scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:795c46999bae845966368a3c013e0e00947932d68e235702b5c3f6ea799aa8c9"},
{file = "scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:18aaacb735ab38b38db42cb01f6b92a2d0d4b6aabefeb07f02849e47f8fb3594"},
{file = "scipy-1.15.3-cp311-cp311-win_amd64.whl", hash = "sha256:ae48a786a28412d744c62fd7816a4118ef97e5be0bee968ce8f0a2fba7acf3bb"},
{file = "scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6ac6310fdbfb7aa6612408bd2f07295bcbd3fda00d2d702178434751fe48e019"},
{file = "scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:185cd3d6d05ca4b44a8f1595af87f9c372bb6acf9c808e99aa3e9aa03bd98cf6"},
{file = "scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:05dc6abcd105e1a29f95eada46d4a3f251743cfd7d3ae8ddb4088047f24ea477"},
{file = "scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:06efcba926324df1696931a57a176c80848ccd67ce6ad020c810736bfd58eb1c"},
{file = "scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05045d8b9bfd807ee1b9f38761993297b10b245f012b11b13b91ba8945f7e45"},
{file = "scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271e3713e645149ea5ea3e97b57fdab61ce61333f97cfae392c28ba786f9bb49"},
{file = "scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e"},
{file = "scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539"},
{file = "scipy-1.15.3-cp312-cp312-win_amd64.whl", hash = "sha256:52092bc0472cfd17df49ff17e70624345efece4e1a12b23783a1ac59a1b728ed"},
{file = "scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c620736bcc334782e24d173c0fdbb7590a0a436d2fdf39310a8902505008759"},
{file = "scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:7e11270a000969409d37ed399585ee530b9ef6aa99d50c019de4cb01e8e54e62"},
{file = "scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8c9ed3ba2c8a2ce098163a9bdb26f891746d02136995df25227a20e71c396ebb"},
{file = "scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0bdd905264c0c9cfa74a4772cdb2070171790381a5c4d312c973382fc6eaf730"},
{file = "scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825"},
{file = "scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7"},
{file = "scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11"},
{file = "scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126"},
{file = "scipy-1.15.3-cp313-cp313-win_amd64.whl", hash = "sha256:b90ab29d0c37ec9bf55424c064312930ca5f4bde15ee8619ee44e69319aab163"},
{file = "scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3ac07623267feb3ae308487c260ac684b32ea35fd81e12845039952f558047b8"},
{file = "scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6487aa99c2a3d509a5227d9a5e889ff05830a06b2ce08ec30df6d79db5fcd5c5"},
{file = "scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:50f9e62461c95d933d5c5ef4a1f2ebf9a2b4e83b0db374cb3f1de104d935922e"},
{file = "scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:14ed70039d182f411ffc74789a16df3835e05dc469b898233a245cdfd7f162cb"},
{file = "scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723"},
{file = "scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb"},
{file = "scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4"},
{file = "scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5"},
{file = "scipy-1.15.3-cp313-cp313t-win_amd64.whl", hash = "sha256:76ad1fb5f8752eabf0fa02e4cc0336b4e8f021e2d5f061ed37d6d264db35e3ca"},
{file = "scipy-1.15.3.tar.gz", hash = "sha256:eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf"},
]
[package.dependencies]
numpy = ">=1.23.5,<2.5"
[package.extras]
dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"]
doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"]
test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
[[package]]
name = "sigmf"
version = "1.2.10"
description = "Easily interact with Signal Metadata Format (SigMF) recordings."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "sigmf-1.2.10-py3-none-any.whl", hash = "sha256:a9a7f7d0d48c350a78fc48ab14baa0fb7fa2441855467f3593d4c591d2018c11"},
{file = "sigmf-1.2.10.tar.gz", hash = "sha256:0301a312abb9d34e090798a67886ea0aa2bf796242d856e49c8707fd3d3c9255"},
]
[package.dependencies]
jsonschema = "*"
numpy = "*"
[package.extras]
apps = ["scipy"]
test = ["hypothesis", "pylint", "pytest", "pytest-cov"]
[[package]]
name = "six"
version = "1.17.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
files = [
{file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
]
[[package]]
name = "sniffio"
version = "1.3.1"
@ -1091,12 +1640,24 @@ version = "4.15.0"
description = "Backported and Experimental Type Hints for Python 3.9+"
optional = false
python-versions = ">=3.9"
groups = ["dev", "docs", "test"]
groups = ["main", "dev", "docs", "test"]
files = [
{file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"},
{file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"},
]
markers = {dev = "python_version == \"3.10\"", docs = "python_version <= \"3.12\"", test = "python_version == \"3.10\""}
markers = {main = "python_version <= \"3.12\"", dev = "python_version == \"3.10\"", docs = "python_version <= \"3.12\"", test = "python_version == \"3.10\""}
[[package]]
name = "tzdata"
version = "2025.2"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
groups = ["main"]
files = [
{file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"},
{file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"},
]
[[package]]
name = "urllib3"
@ -1359,4 +1920,4 @@ files = [
[metadata]
lock-version = "2.1"
python-versions = ">=3.10"
content-hash = "a2ed0693cec759249f9acbd93cb01d30a35512fd4cb6c8be18cd1a734d02570c"
content-hash = "8fafbb6cdc3f1490399a4cea9520376c1743cf30b3430d9c3ad35ff5754bb850"

View File

@ -38,7 +38,13 @@ classifiers = [
"Typing :: Typed"
]
dependencies = [
"numpy==1.26.4" # Pinned at 1.26.4 for compatibility with Radioconda
"numpy (==1.26.4)", # Pinned at 1.26.4 for compatibility with Radioconda
"scipy (<1.16)",
"sigmf (>=1.2.10,<2.0.0)",
"quantiphy (>=2.20,<3.0)",
"plotly (>=6.3.0,<7.0.0)",
"h5py (>=3.14.0,<4.0.0)",
"pandas (>=2.3.2,<3.0.0)"
]
[tool.poetry]

View File

@ -1,8 +0,0 @@
"""
The Data package contains abstract data types tailored for radio machine learning, such as ``Recording``, as well
as the abstract interfaces for the radio dataset and radio dataset builder framework.
"""
__all__ = ["Annotation", "Recording"]
from .annotation import Annotation
from .recording import Recording

View File

@ -1,22 +0,0 @@
"""
The IO package contains utilities for input and output operations, such as loading and saving recordings to and from
file.
"""
__all__ = [
# Common:
"exists",
"copy",
"move",
"validate",
# Recording:
"save_recording",
"load_recording",
"to_sigmf",
"from_sigmf",
"to_npy",
"from_npy",
]
from .common import copy, exists, move, validate
from .recording import from_npy, from_sigmf, load_recording, to_npy, to_sigmf

View File

@ -1,8 +0,0 @@
"""
The transforms package houses a collection of functions to manipulate and transform radio data.
This package contains various functions that operate on NumPy arrays. These functions are utilized within the machine
learning backends to build transforms and functions that seamlessly integrate with those from the respective backend.
All the transforms in this package expect data in the complex 1xN format.
"""

View File

@ -1,18 +0,0 @@
from numpy.typing import NDArray
def bytes_to_samples(data: bytes) -> NDArray:
    """Convert raw interleaved int16 I/Q bytes to complex samples in the 1xN format.

    The byte stream is interpreted as native-endian signed 16-bit integers in
    interleaved I, Q order. Values are scaled by 1/2048 (12-bit full scale,
    per the original reference implementation) and paired into complex samples.

    :param data: Array of bytes holding interleaved int16 I/Q values.
    :type data: bytes
    :raises ValueError: If ``data`` does not contain a whole number of int16 I/Q pairs.
    :return: Tape of IQ samples, as a complex 1xN numpy array.
    :rtype: np.ndarray
    """
    import numpy as np  # local import: module header only imports numpy.typing

    if len(data) % 4:  # each I/Q pair is two int16 values = 4 bytes
        raise ValueError("data must contain a whole number of int16 I/Q pairs")
    samples = np.frombuffer(data, dtype=np.int16).astype(np.float32)
    samples /= 2048  # 12-bit ADC full scale -> approximately [-1, 1)
    iq = samples[::2] + 1j * samples[1::2]
    # The documented contract is the complex 1xN format, so add the channel axis.
    return iq.reshape(1, -1)

View File

@ -1,12 +0,0 @@
"""
The package contains assorted plotting and report generation utilities to help visualize RIA components such as
recordings and radio datasets.
"""
__all__ = [
"view_annotations",
"view_channels",
"view_sig",
]
from .view_signal import view_annotations, view_channels, view_sig

View File

@ -0,0 +1,8 @@
"""
The datatypes package contains abstract data types tailored for radio machine learning.
"""
__all__ = ["Annotation", "Recording"]
from .annotation import Annotation
from .recording import Recording

View File

@ -7,9 +7,9 @@ from typing import Any, Optional
from packaging.version import Version
from utils._utils.abstract_attribute import abstract_attribute
from utils.data.datasets.license.dataset_license import DatasetLicense
from utils.data.datasets.radio_dataset import RadioDataset
from ria_toolkit_oss.utils.abstract_attribute import abstract_attribute
from ria_toolkit_oss.datatypes.datasets.license.dataset_license import DatasetLicense
from ria_toolkit_oss.datatypes.datasets.radio_dataset import RadioDataset
class DatasetBuilder(ABC):

View File

@ -7,11 +7,11 @@ from typing import Optional
import h5py
import numpy as np
from utils.data.datasets.h5helpers import (
from ria_toolkit_oss.datatypes.datasets.h5helpers import (
append_entry_inplace,
copy_dataset_entry_by_index,
)
from utils.data.datasets.radio_dataset import RadioDataset
from ria_toolkit_oss.datatypes.datasets.radio_dataset import RadioDataset
class IQDataset(RadioDataset, ABC):
@ -19,7 +19,7 @@ class IQDataset(RadioDataset, ABC):
radiofrequency (RF) signals represented as In-phase (I) and Quadrature (Q) samples.
For machine learning tasks that involve processing spectrograms, please use
utils.data.datasets.SpectDataset instead.
ria_toolkit_oss.datatypes.datasets.SpectDataset instead.
This is an abstract interface defining common properties and behaviour of IQDatasets. Therefore, this class
should not be instantiated directly. Instead, it is subclassed to define custom interfaces for specific machine

View File

@ -8,7 +8,7 @@ Common license types for datasets courtesy of the University of Calgary:
.. note::
License descriptions are provided for informational purposes only and should not be construed as legal advice.
For legal guidance, please refer to official licence documentation and consult with legal professionals specializing
For legal guidance, please refer to official license documentation and consult with legal professionals specializing
in software and dataset licensing.
.. note::
@ -41,7 +41,7 @@ PUBLIC_DOMAIN = DatasetLicense(
identifier=None,
description="Technically not a license, the public domain mark relinquishes all rights to a dataset and "
"dedicates the dataset to the public domain.",
licence="https://creativecommons.org/public-domain/pdm/",
license="https://creativecommons.org/public-domain/pdm/",
)
"""
`Public Domain <https://creativecommons.org/public-domain/pdm/>`_: Technically not a license, the public domain mark
@ -54,7 +54,7 @@ CC_0 = DatasetLicense(
identifier="CC0-1.0",
description="A Creative Commons license and is like a public domain dedication. The copyright holder "
"surrenders rights in a dataset using this license.",
licence="https://creativecommons.org/publicdomain/zero/1.0/",
license="https://creativecommons.org/publicdomain/zero/1.0/",
)
"""
`Creative Commons Public Domain Dedication <https://creativecommons.org/public-domain/pdm/>`_: A Creative Commons
@ -67,7 +67,7 @@ ODC_PDDL = DatasetLicense(
identifier="PDDL-1.0",
description="This license is one of the Open Data Commons licenses and is like a public domain dedication. "
"The copyright holder surrenders rights in a dataset using this license.",
licence="https://opendatacommons.org/licenses/pddl/",
license="https://opendatacommons.org/licenses/pddl/",
)
"""
`Open Data Commons Public Domain Dedication and License <https://opendatacommons.org/licenses/pddl/>`_: This license
@ -81,7 +81,7 @@ CC_BY = DatasetLicense(
identifier="CC-BY-4.0",
description="This license is one of the open Creative Commons licenses and allows users to share and adapt "
"the dataset so long as they give credit to the copyright holder.",
licence="https://creativecommons.org/licenses/by/4.0/",
license="https://creativecommons.org/licenses/by/4.0/",
)
"""
`Creative Commons Attribution 4.0 International <https://creativecommons.org/licenses/by/4.0/>`_: This license is one
@ -95,7 +95,7 @@ ODC_BY = DatasetLicense(
identifier="ODC-By-1.0",
description="This license is one of the Open Data Commons licenses and allows users to share and adapt the "
"dataset as long as they give credit to the copyright holder.",
licence="https://opendatacommons.org/licenses/by/",
license="https://opendatacommons.org/licenses/by/",
)
"""
`Open Data Commons Attribution License <https://opendatacommons.org/licenses/by/>`_: This license is one of the Open
@ -110,7 +110,7 @@ CC_BY_SA = DatasetLicense(
description="This license is one of the open Creative Commons licenses and allows users to share and adapt "
"the dataset as long as they give credit to the copyright holder and distribute any additions, "
"transformations or changes to the dataset under this same license.",
licence="https://creativecommons.org/licenses/by-sa/4.0/",
license="https://creativecommons.org/licenses/by-sa/4.0/",
)
"""
`Creative Commons Attribution-ShareAlike 4.0 International <https://creativecommons.org/licenses/by-sa/4.0/>`_: This
@ -126,7 +126,7 @@ ODC_ODbL = DatasetLicense(
description="This license is one of the Open Data Commons licenses and allows users to share and adapt the "
"dataset as long as they give credit to the copyright holder and distribute any additions, "
"transformation or changes to the dataset.",
licence="https://opendatacommons.org/licenses/odbl/",
license="https://opendatacommons.org/licenses/odbl/",
)
"""
`Open Data Commons Open Database License <https://opendatacommons.org/licenses/odbl/>`_: This license is one of the
@ -141,7 +141,7 @@ CC_BY_NC = DatasetLicense(
description="This license is one of the Creative Commons licenses and allows users to share and adapt the "
"dataset if they give credit to the copyright holder and do not use the dataset for any "
"commercial purposes.",
licence="https://creativecommons.org/licenses/by-nc/4.0/",
license="https://creativecommons.org/licenses/by-nc/4.0/",
)
"""
`Creative Commons Attribution-NonCommercial 4.0 International <https://creativecommons.org/licenses/by-nc/4.0/>`_: This
@ -156,7 +156,7 @@ CC_BY_ND = DatasetLicense(
description="This license is one of the Creative Commons licenses and allows users to share the dataset if "
"they give credit to copyright holder, but they cannot make any additions, transformations or "
"changes to the dataset under this license.",
licence="https://creativecommons.org/licenses/by-nd/4.0/",
license="https://creativecommons.org/licenses/by-nd/4.0/",
)
"""
`Creative Commons Attribution-NoDerivatives 4.0 International <https://creativecommons.org/licenses/by-nd/4.0/>`_: This
@ -172,7 +172,7 @@ CC_BY_NC_SA = DatasetLicense(
"if they (1) give credit to the copyright holder, (2) do not use the dataset for any commercial "
"purposes, and (3) distribute any additions, transformations or changes to the dataset under this "
"same license.",
licence="https://creativecommons.org/licenses/by-nc-sa/4.0/",
license="https://creativecommons.org/licenses/by-nc-sa/4.0/",
)
"""
`Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International
@ -190,7 +190,7 @@ CC_BY_NC_ND = DatasetLicense(
"unmodified dataset if they give credit to the copyright holder and do not share it for "
"commercial purposes. Users cannot make any additions, transformations or changes to the dataset "
"under this license.",
licence="https://creativecommons.org/licenses/by-nc-nd/4.0/",
license="https://creativecommons.org/licenses/by-nc-nd/4.0/",
)
"""
`Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International
@ -204,7 +204,7 @@ RESTRICTED = DatasetLicense(
name="Restricted (All Rights Reserved)",
identifier="Restricted",
description="All rights reserved. No permissions granted for use, modification, or distribution of the dataset.",
licence="Restricted (All Rights Reserved)",
license="Restricted (All Rights Reserved)",
)
"""
Restricted (All Rights Reserved): No permissions granted for use, modification, or distribution of the dataset.

View File

@ -10,4 +10,4 @@ class DatasetLicense:
name: str #: The name or title of the license.
identifier: str | None #: SPDX short identifier, or None if one does not exist.
description: str #: A description of the license.
licence: str #: Full license text or URL if the license is available online.
license: str #: Full license text or URL if the license is available online.

View File

@ -12,7 +12,7 @@ import numpy as np
import pandas as pd
from numpy.typing import ArrayLike
from utils.data.datasets.h5helpers import (
from ria_toolkit_oss.datatypes.datasets.h5helpers import (
append_entry_inplace,
copy_file,
copy_over_example,
@ -29,7 +29,7 @@ class RadioDataset(ABC):
This is an abstract interface defining common properties and behavior of radio datasets. Therefore, this class
should not be instantiated directly. Instead, it should be subclassed to define specific interfaces for different
types of radio datasets. For example, see utils.data.datasets.IQDataset, which is a radio dataset
types of radio datasets. For example, see ria_toolkit_oss.datatypes.datasets.IQDataset, which is a radio dataset
subclass tailored for tasks involving the processing of radio signals represented as IQ (In-phase and Quadrature)
samples.

View File

@ -3,7 +3,7 @@ from __future__ import annotations
import os
from abc import ABC
from utils.data.datasets.radio_dataset import RadioDataset
from ria_toolkit_oss.datatypes.datasets.radio_dataset import RadioDataset
class SpectDataset(RadioDataset, ABC):
@ -13,7 +13,7 @@ class SpectDataset(RadioDataset, ABC):
radio signal spectrograms.
For machine learning tasks that involve processing on IQ samples, please use
utils.data.datasets.IQDataset instead.
ria_toolkit_oss.datatypes.datasets.IQDataset instead.
This is an abstract interface defining common properties and behaviour of IQDatasets. Therefore, this class
should not be instantiated directly. Instead, it is subclassed to define custom interfaces for specific machine

View File

@ -6,8 +6,8 @@ from typing import Optional
import numpy as np
from numpy.random import Generator
from utils.data.datasets import RadioDataset
from utils.data.datasets.h5helpers import copy_over_example, make_empty_clone
from ria_toolkit_oss.datatypes.datasets import RadioDataset
from ria_toolkit_oss.datatypes.datasets.h5helpers import copy_over_example, make_empty_clone
def split(dataset: RadioDataset, lengths: list[int | float]) -> list[RadioDataset]:
@ -28,7 +28,7 @@ def split(dataset: RadioDataset, lengths: list[int | float]) -> list[RadioDatase
cases.
This function is deterministic, meaning it will always produce the same split. For a random split, see
utils.data.datasets.random_split.
ria_toolkit_oss.datatypes.datasets.random_split.
:param dataset: Dataset to be split.
:type dataset: RadioDataset
@ -47,7 +47,7 @@ def split(dataset: RadioDataset, lengths: list[int | float]) -> list[RadioDatase
>>> import string
>>> import numpy as np
>>> import pandas as pd
>>> from utils.data.datasets import split
>>> from ria_toolkit_oss.datatypes.datasets import split
First, let's generate some random data:
@ -123,7 +123,7 @@ def random_split(
training and test datasets.
This restriction makes it unlikely that a random split will produce datasets with the exact lengths specified.
If it is important to ensure the closest possible split, consider using utils.data.datasets.split instead.
If it is important to ensure the closest possible split, consider using ria_toolkit_oss.datatypes.datasets.split instead.
:param dataset: Dataset to be split.
:type dataset: RadioDataset
@ -140,7 +140,7 @@ def random_split(
:rtype: list of RadioDataset
See Also:
utils.data.datasets.split: Usage is the same as for ``random_split()``.
ria_toolkit_oss.datatypes.datasets.split: Usage is the same as for ``random_split()``.
"""
if not isinstance(dataset, RadioDataset):
raise ValueError(f"'dataset' must be RadioDataset or one of its subclasses, got {type(dataset)}.")

View File

@ -14,7 +14,7 @@ import numpy as np
from numpy.typing import ArrayLike
from quantiphy import Quantity
from utils.data.annotation import Annotation
from ria_toolkit_oss.datatypes.annotation import Annotation
PROTECTED_KEYS = ["rec_id", "timestamp"]
@ -28,7 +28,7 @@ class Recording:
Metadata is stored in a dictionary of key value pairs,
to include information such as sample_rate and center_frequency.
Annotations are a list of :ref:`Annotation <utils.data.Annotation>`,
Annotations are a list of :ref:`Annotation <ria_toolkit_oss.datatypes.Annotation>`,
defining bounding boxes in time and frequency with labels and metadata.
Here, signal data is represented as a NumPy array. This class is then extended in the RIA Backends to provide
@ -68,7 +68,7 @@ class Recording:
**Examples:**
>>> import numpy
>>> from utils.data import Recording, Annotation
>>> from ria_toolkit_oss.datatypes import Recording, Annotation
>>> # Create an array of complex samples, just 1s in this case.
>>> samples = numpy.ones(10000, dtype=numpy.complex64)
@ -233,7 +233,7 @@ class Recording:
:return: Data-type of the data array's elements.
:type: numpy dtype object
"""
return self.data.dtype
return self.data.dtype
@property
def timestamp(self) -> float | int:
@ -282,7 +282,7 @@ class Recording:
# cross-platform support where the types are aliased across platforms.
with warnings.catch_warnings():
warnings.simplefilter("ignore") # Casting may generate user warnings. E.g., complex -> real
data = self.data.astype(dtype)
data = self.data.astype(dtype)
if np.iscomplexobj(data):
return Recording(data=data, metadata=self.metadata, annotations=self.annotations)
@ -307,7 +307,7 @@ class Recording:
Create a recording and add metadata:
>>> import numpy
>>> from utils.data import Recording
>>> from ria_toolkit_oss.datatypes import Recording
>>>
>>> samples = numpy.ones(10000, dtype=numpy.complex64)
>>> metadata = {
@ -362,7 +362,7 @@ class Recording:
Create a recording and update metadata:
>>> import numpy
>>> from utils.data import Recording
>>> from ria_toolkit_oss.datatypes import Recording
>>> samples = numpy.ones(10000, dtype=numpy.complex64)
>>> metadata = {
@ -416,7 +416,7 @@ class Recording:
Create a recording and add metadata:
>>> import numpy
>>> from utils.data import Recording
>>> from ria_toolkit_oss.datatypes import Recording
>>> samples = numpy.ones(10000, dtype=numpy.complex64)
>>> metadata = {
@ -449,7 +449,7 @@ class Recording:
:param output_path: The output image path. Defaults to "images/signal.png".
:type output_path: str, optional
:param kwargs: Keyword arguments passed on to utils.view.view_sig.
:param kwargs: Keyword arguments passed on to ria_toolkit_oss.view.view_sig.
:type: dict of keyword arguments
**Examples:**
@ -457,7 +457,7 @@ class Recording:
Create a recording and view it as a plot in a .png image:
>>> import numpy
>>> from utils.data import Recording
>>> from ria_toolkit_oss.datatypes import Recording
>>> samples = numpy.ones(10000, dtype=numpy.complex64)
>>> metadata = {
@ -468,7 +468,7 @@ class Recording:
>>> recording = Recording(data=samples, metadata=metadata)
>>> recording.view()
"""
from utils.view import view_sig
from ria_toolkit_oss.view import view_sig
view_sig(recording=self, output_path=output_path, **kwargs)
@ -478,7 +478,7 @@ class Recording:
The SigMF io format is defined by the `SigMF Specification Project <https://github.com/sigmf/SigMF>`_
:param recording: The recording to be written to file.
:type recording: utils.data.Recording
:type recording: ria_toolkit_oss.datatypes.Recording
:param filename: The name of the file where the recording is to be saved. Defaults to auto generated filename.
:type filename: os.PathLike or str, optional
:param path: The directory path to where the recording is to be saved. Defaults to recordings/.
@ -493,7 +493,7 @@ class Recording:
Create a recording and view it as a plot in a `.png` image:
>>> import numpy
>>> from utils.data import Recording
>>> from ria_toolkit_oss.datatypes import Recording
>>> samples = numpy.ones(10000, dtype=numpy.complex64)
>>> metadata = {
@ -504,7 +504,7 @@ class Recording:
>>> recording = Recording(data=samples, metadata=metadata)
>>> recording.view()
"""
from utils.io.recording import to_sigmf
from ria_toolkit_oss.io.recording import to_sigmf
to_sigmf(filename=filename, path=path, recording=self)
@ -526,7 +526,7 @@ class Recording:
Create a recording and save it to a .npy file:
>>> import numpy
>>> from utils.data import Recording
>>> from ria_toolkit_oss.datatypes import Recording
>>> samples = numpy.ones(10000, dtype=numpy.complex64)
>>> metadata = {
@ -537,43 +537,43 @@ class Recording:
>>> recording = Recording(data=samples, metadata=metadata)
>>> recording.to_npy()
"""
from utils.io.recording import to_npy
from ria_toolkit_oss.io.recording import to_npy
to_npy(recording=self, filename=filename, path=path)
def trim(self, num_samples: int, start_sample: Optional[int] = 0) -> Recording:
"""Trim Recording samples to a desired length, shifting annotations to maintain alignment.
:param start_sample: The start index of the desired trimmed recording. Defaults to 0.
:type start_sample: int, optional
:param num_samples: The number of samples that the output trimmed recording will have.
:type num_samples: int
:raises IndexError: If start_sample + num_samples is greater than the length of the recording.
:raises IndexError: If sample_start < 0 or num_samples < 0.
:param start_sample: The start index of the desired trimmed recording. Defaults to 0.
:type start_sample: int, optional
:param num_samples: The number of samples that the output trimmed recording will have.
:type num_samples: int
:raises IndexError: If start_sample + num_samples is greater than the length of the recording.
:raises IndexError: If sample_start < 0 or num_samples < 0.
:return: The trimmed Recording.
:rtype: Recording
:return: The trimmed Recording.
:rtype: Recording
**Examples:**
Create a recording and trim it:
Create a recording and trim it:
>>> import numpy
>>> from utils.data import Recording
>>> import numpy
>>> from ria_toolkit_oss.datatypes import Recording
>>> samples = numpy.ones(10000, dtype=numpy.complex64)
>>> metadata = {
... "sample_rate": 1e6,
... "center_frequency": 2.44e9,
... }
>>> samples = numpy.ones(10000, dtype=numpy.complex64)
>>> metadata = {
... "sample_rate": 1e6,
... "center_frequency": 2.44e9,
... }
>>> recording = Recording(data=samples, metadata=metadata)
>>> print(len(recording))
10000
>>> recording = Recording(data=samples, metadata=metadata)
>>> print(len(recording))
10000
>>> trimmed_recording = recording.trim(start_sample=1000, num_samples=1000)
>>> print(len(trimmed_recording))
1000
>>> trimmed_recording = recording.trim(start_sample=1000, num_samples=1000)
>>> print(len(trimmed_recording))
1000
"""
if start_sample < 0:
@ -613,7 +613,7 @@ class Recording:
Create a recording with maximum amplitude 0.5 and normalize to a maximum amplitude of 1:
>>> import numpy
>>> from utils.data import Recording
>>> from ria_toolkit_oss.datatypes import Recording
>>> samples = numpy.ones(10000, dtype=numpy.complex64) * 0.5
>>> metadata = {

View File

@ -0,0 +1,15 @@
"""
The IO package contains utilities for input and output operations, such as loading and saving recordings to and from
file.
"""
__all__ = [
# Recording:
"load_rec",
"to_sigmf",
"from_sigmf",
"to_npy",
"from_npy",
]
from .recording import from_npy, from_sigmf, load_rec, to_npy, to_sigmf

View File

@ -1,5 +1,5 @@
"""
Utilities for input/output operations on the utils.data.Recording object.
Utilities for input/output operations on the ria_toolkit_oss.datatypes.Recording object.
"""
import datetime as dt
@ -12,8 +12,8 @@ import sigmf
from sigmf import SigMFFile, sigmffile
from sigmf.utils import get_data_type_str
from utils.data import Annotation
from utils.data.recording import Recording
from ria_toolkit_oss.datatypes import Annotation
from ria_toolkit_oss.datatypes.recording import Recording
def load_rec(file: os.PathLike) -> Recording:
@ -29,7 +29,7 @@ def load_rec(file: os.PathLike) -> Recording:
:raises ValueError: If the inferred file extension is not supported.
:return: The recording, as initialized from file(s).
:rtype: utils.data.Recording
:rtype: ria_toolkit_oss.datatypes.Recording
"""
_, extension = os.path.splitext(file)
extension = extension.lstrip(".")
@ -98,7 +98,7 @@ def to_sigmf(recording: Recording, filename: Optional[str] = None, path: Optiona
The SigMF io format is defined by the `SigMF Specification Project <https://github.com/sigmf/SigMF>`_
:param recording: The recording to be written to file.
:type recording: utils.data.Recording
:type recording: ria_toolkit_oss.datatypes.Recording
:param filename: The name of the file where the recording is to be saved. Defaults to auto generated filename.
:type filename: os.PathLike or str, optional
:param path: The directory path to where the recording is to be saved. Defaults to recordings/.
@ -110,9 +110,9 @@ def to_sigmf(recording: Recording, filename: Optional[str] = None, path: Optiona
**Examples:**
>>> from utils.sdr import Synth
>>> from utils.data import Recording
>>> from utils.io import to_sigmf
>>> from ria_toolkit_oss.sdr import Synth
>>> from ria_toolkit_oss.datatypes import Recording
>>> from ria_toolkit_oss.io import to_sigmf
>>> sdr = Synth()
>>> rec = sdr.record(center_frequency=2.4e9, sample_rate=20e6)
>>> to_sigmf(recording=rec, file="sample_recording")
@ -202,7 +202,7 @@ def from_sigmf(file: os.PathLike | str) -> Recording:
:raises IOError: If there is an issue encountered during the file reading process.
:return: The recording, as initialized from the SigMF files.
:rtype: utils.data.Recording
:rtype: ria_toolkit_oss.datatypes.Recording
"""
if len(file) > 11:
@ -253,7 +253,7 @@ def to_npy(recording: Recording, filename: Optional[str] = None, path: Optional[
"""Write recording to ``.npy`` binary file.
:param recording: The recording to be written to file.
:type recording: utils.data.Recording
:type recording: ria_toolkit_oss.datatypes.Recording
:param filename: The name of the file where the recording is to be saved. Defaults to auto generated filename.
:type filename: os.PathLike or str, optional
:param path: The directory path to where the recording is to be saved. Defaults to recordings/.
@ -266,9 +266,9 @@ def to_npy(recording: Recording, filename: Optional[str] = None, path: Optional[
**Examples:**
>>> from utils.sdr import Synth
>>> from utils.data import Recording
>>> from utils.io import to_npy
>>> from ria_toolkit_oss.sdr import Synth
>>> from ria_toolkit_oss.datatypes import Recording
>>> from ria_toolkit_oss.io import to_npy
>>> sdr = Synth()
>>> rec = sdr.record(center_frequency=2.4e9, sample_rate=20e6)
>>> to_npy(recording=rec, file="sample_recording.npy")
@ -308,7 +308,7 @@ def from_npy(file: os.PathLike | str) -> Recording:
:raises IOError: If there is an issue encountered during the file reading process.
:return: The recording, as initialized from the ``.npy`` file.
:rtype: utils.data.Recording
:rtype: ria_toolkit_oss.datatypes.Recording
"""
filename, extension = os.path.splitext(file)

View File

@ -0,0 +1,8 @@
"""
The transforms module houses a collection of functions to manipulate and transform radio data.
This module contains various functions that operate on NumPy arrays. These functions are utilized within the machine
learning backends to build transforms and functions that seamlessly integrate with those from the respective backend.
All the transforms in this module expect data in the complex 1xN format.
"""

View File

@ -1,5 +1,5 @@
"""
This module comprises the functionals of various transforms designed to create new training examples by augmenting
This submodule comprises the functionals of various transforms designed to create new training examples by augmenting
existing examples or recordings using a variety of techniques These transforms take an ArrayLike object as input
and return a corresponding numpy.ndarray with the impairment model applied;
we call the latter the impaired data.
@ -10,8 +10,8 @@ from typing import Optional
import numpy as np
from numpy.typing import ArrayLike
from utils.data.recording import Recording
from utils.helpers.array_conversion import convert_to_2xn
from ria_toolkit_oss.datatypes.recording import Recording
from ria_toolkit_oss.utils.array_conversion import convert_to_2xn
# TODO: For round 2 of index generation, should j be at min 2 spots away from where it was to prevent adjacent patches.
@ -28,7 +28,7 @@ def generate_awgn(signal: ArrayLike | Recording, snr: Optional[float] = 1) -> np
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param snr: The signal-to-noise ratio in dB. Default is 1.
:type snr: float, optional
@ -36,7 +36,7 @@ def generate_awgn(signal: ArrayLike | Recording, snr: Optional[float] = 1) -> np
:return: A numpy array representing the generated noise which matches the SNR of `signal`. If `signal` is a
Recording, returns a Recording object with its `data` attribute containing the generated noise array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[2 + 5j, 1 + 8j]])
>>> new_rec = generate_awgn(rec)
@ -78,14 +78,14 @@ def time_reversal(signal: ArrayLike | Recording) -> np.ndarray | Recording:
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:raises ValueError: If `signal` is not CxN complex.
:return: A numpy array containing the reversed I and Q data samples if `signal` is an array.
If `signal` is a `Recording`, returns a `Recording` object with its `data` attribute containing the
reversed array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[1+2j, 3+4j, 5+6j]])
>>> new_rec = time_reversal(rec)
@ -121,14 +121,14 @@ def spectral_inversion(signal: ArrayLike | Recording) -> np.ndarray | Recording:
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:raises ValueError: If `signal` is not CxN complex.
:return: A numpy array containing the original I and negated Q data samples if `signal` is an array.
If `signal` is a `Recording`, returns a `Recording` object with its `data` attribute containing the
inverted array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[0+45j, 2-10j]])
>>> new_rec = spectral_inversion(rec)
@ -163,14 +163,14 @@ def channel_swap(signal: ArrayLike | Recording) -> np.ndarray | Recording:
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:raises ValueError: If `signal` is not CxN complex.
:return: A numpy array containing the swapped I and Q data samples if `signal` is an array.
If `signal` is a `Recording`, returns a `Recording` object with its `data` attribute containing the
swapped array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[10+20j, 7+35j]])
>>> new_rec = channel_swap(rec)
@ -205,14 +205,14 @@ def amplitude_reversal(signal: ArrayLike | Recording) -> np.ndarray | Recording:
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:raises ValueError: If `signal` is not CxN complex.
:return: A numpy array containing the negated I and Q data samples if `signal` is an array.
If `signal` is a `Recording`, returns a `Recording` object with its `data` attribute containing the
negated array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[4-3j, -5-2j, -9+1j]])
>>> new_rec = amplitude_reversal(rec)
@ -251,7 +251,7 @@ def drop_samples( # noqa: C901 # TODO: Simplify function
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param max_section_size: Maximum allowable size of the section to be dropped and replaced. Default is 2.
:type max_section_size: int, optional
:param fill_type: Fill option used to replace dropped section of data (back-fill, front-fill, mean, zeros).
@ -273,7 +273,7 @@ def drop_samples( # noqa: C901 # TODO: Simplify function
:return: A numpy array containing the I and Q data samples with replaced subsections if
`signal` is an array. If `signal` is a `Recording`, returns a `Recording` object with its `data`
attribute containing the array with dropped samples.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[2+5j, 1+8j, 6+4j, 3+7j, 4+9j]])
>>> new_rec = drop_samples(rec)
@ -344,7 +344,7 @@ def quantize_tape(
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param bin_number: The number of bins the signal should be divided into. Default is 4.
:type bin_number: int, optional
:param rounding_type: The type of rounding applied during processing. Default is "floor".
@ -360,7 +360,7 @@ def quantize_tape(
:return: A numpy array containing the quantized I and Q data samples if `signal` is an array.
If `signal` is a `Recording`, returns a `Recording` object with its `data` attribute containing
the quantized array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[1+1j, 4+4j, 1+2j, 1+4j]])
>>> new_rec = quantize_tape(rec)
@ -418,7 +418,7 @@ def quantize_parts(
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param max_section_size: Maximum allowable size of the section to be quantized. Default is 2.
:type max_section_size: int, optional
:param bin_number: The number of bins the signal should be divided into. Default is 4.
@ -436,7 +436,7 @@ def quantize_parts(
:return: A numpy array containing the I and Q data samples with quantized subsections if `signal`
is an array. If `signal` is a `Recording`, returns a `Recording` object with its `data` attribute
containing the partially quantized array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[2+5j, 1+8j, 6+4j, 3+7j, 4+9j]])
>>> new_rec = quantize_parts(rec)
@ -506,7 +506,7 @@ def magnitude_rescale(
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param starting_bounds: The bounds (inclusive) as indices in which the starting position of the rescaling occurs.
Default is None, but if user does not assign any bounds, the bounds become (random index, N-1).
:type starting_bounds: tuple, optional
@ -518,7 +518,7 @@ def magnitude_rescale(
:return: A numpy array containing the I and Q data samples with the rescaled magnitude after the random
starting point if `signal` is an array. If `signal` is a `Recording`, returns a `Recording`
object with its `data` attribute containing the rescaled array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[2+5j, 1+8j, 6+4j, 3+7j, 4+9j]])
>>> new_rec = magnitude_rescale(rec)
@ -567,7 +567,7 @@ def cut_out( # noqa: C901 # TODO: Simplify function
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param max_section_size: Maximum allowable size of the section to be quantized. Default is 3.
:type max_section_size: int, optional
:param fill_type: Fill option used to replace cutout section of data (zeros, ones, low-snr, avg-snr-1, avg-snr-2).
@ -592,7 +592,7 @@ def cut_out( # noqa: C901 # TODO: Simplify function
:return: A numpy array containing the I and Q data samples with random sections cut out and replaced according to
`fill_type` if `signal` is an array. If `signal` is a `Recording`, returns a `Recording` object
with its `data` attribute containing the cut out and replaced array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[2+5j, 1+8j, 6+4j, 3+7j, 4+9j]])
>>> new_rec = cut_out(rec)
@ -661,7 +661,7 @@ def patch_shuffle(signal: ArrayLike | Recording, max_patch_size: Optional[int] =
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param max_patch_size: Maximum allowable patch size of the data that can be shuffled. Default is 3.
:type max_patch_size: int, optional
@ -671,7 +671,7 @@ def patch_shuffle(signal: ArrayLike | Recording, max_patch_size: Optional[int] =
:return: A numpy array containing the I and Q data samples with randomly shuffled regions if `signal` is
an array. If `signal` is a `Recording`, returns a `Recording` object with its `data` attribute containing
the shuffled array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[2+5j, 1+8j, 6+4j, 3+7j, 4+9j]])
>>> new_rec = patch_shuffle(rec)
@ -679,7 +679,7 @@ def patch_shuffle(signal: ArrayLike | Recording, max_patch_size: Optional[int] =
array([[2+5j, 1+8j, 3+4j, 6+9j, 4+7j]])
"""
if isinstance(signal, Recording):
data = signal.data.copy() # Cannot shuffle read-only array.
data = signal.data.copy()  # Cannot shuffle read-only array.
else:
data = np.asarray(signal)

View File

@ -1,5 +1,5 @@
"""
This module comprises various transforms designed to represent signal impairments.
This submodule comprises various transforms designed to represent signal impairments.
These transforms take a recording as input and return a corresponding recording with
the impairment model applied; we call the latter an impaired recording.
@ -15,8 +15,8 @@ import numpy as np
from numpy.typing import ArrayLike
from scipy.signal import resample_poly
from utils.data import Recording
from utils.transforms import iq_augmentations
from ria_toolkit_oss.datatypes import Recording
from ria_toolkit_oss.transforms import iq_augmentations
def add_awgn_to_signal(signal: ArrayLike | Recording, snr: Optional[float] = 1) -> np.ndarray | Recording:
@ -30,7 +30,7 @@ def add_awgn_to_signal(signal: ArrayLike | Recording, snr: Optional[float] = 1)
:param signal: Input IQ data as a complex ``C x N`` array or `Recording`, where ``C`` is the number of channels
and ``N`` is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param snr: The signal-to-noise ratio in dB. Default is 1.
:type snr: float, optional
@ -38,7 +38,7 @@ def add_awgn_to_signal(signal: ArrayLike | Recording, snr: Optional[float] = 1)
:return: A numpy array which is the sum of the noise (which matches the SNR) and the original signal. If `signal`
is a `Recording`, returns a `Recording object` with its `data` attribute containing the noisy signal array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[1+1j, 2+2j]])
>>> new_rec = add_awgn_to_signal(rec)
@ -72,7 +72,7 @@ def time_shift(signal: ArrayLike | Recording, shift: Optional[int] = 1) -> np.nd
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param shift: The number of indices to shift by. Default is 1.
:type shift: int, optional
@ -81,7 +81,7 @@ def time_shift(signal: ArrayLike | Recording, shift: Optional[int] = 1) -> np.nd
:return: A numpy array which represents the time-shifted signal. If `signal` is a `Recording`,
returns a `Recording object` with its `data` attribute containing the time-shifted array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[1+1j, 2+2j, 3+3j, 4+4j, 5+5j]])
>>> new_rec = time_shift(rec, -2)
@ -133,7 +133,7 @@ def frequency_shift(signal: ArrayLike | Recording, shift: Optional[float] = 0.5)
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param shift: The frequency shift relative to the sample rate. Must be in the range ``[-0.5, 0.5]``.
Default is 0.5.
:type shift: float, optional
@ -143,7 +143,7 @@ def frequency_shift(signal: ArrayLike | Recording, shift: Optional[float] = 0.5)
:return: A numpy array which represents the frequency-shifted signal. If `signal` is a `Recording`,
returns a `Recording object` with its `data` attribute containing the frequency-shifted array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[1+1j, 2+2j, 3+3j, 4+4j]])
>>> new_rec = frequency_shift(rec, -0.4)
@ -188,7 +188,7 @@ def phase_shift(signal: ArrayLike | Recording, phase: Optional[float] = np.pi) -
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param phase: The phase angle by which to rotate the IQ samples, in radians. Must be in the range ``[-π, π]``.
Default is π.
:type phase: float, optional
@ -198,7 +198,7 @@ def phase_shift(signal: ArrayLike | Recording, phase: Optional[float] = np.pi) -
:return: A numpy array which represents the phase-shifted signal. If `signal` is a `Recording`,
returns a `Recording object` with its `data` attribute containing the phase-shifted array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[1+1j, 2+2j, 3+3j, 4+4j]])
>>> new_rec = phase_shift(rec, np.pi/2)
@ -245,7 +245,7 @@ def iq_imbalance(
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param amplitude_imbalance: The IQ amplitude imbalance to apply, in dB. Default is 1.5.
:type amplitude_imbalance: float, optional
:param phase_imbalance: The IQ phase imbalance to apply, in radians. Default is π.
@ -259,7 +259,7 @@ def iq_imbalance(
:return: A numpy array which is the original signal with an applied IQ imbalance. If `signal` is a `Recording`,
returns a `Recording object` with its `data` attribute containing the IQ imbalanced signal array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[2+18j, -34+2j, 3+9j]])
>>> new_rec = iq_imbalance(rec, 1, np.pi, 2)
@ -314,7 +314,7 @@ def resample(signal: ArrayLike | Recording, up: Optional[int] = 4, down: Optiona
:param signal: Input IQ data as a complex CxN array or `Recording`, where C is the number of channels and N
is the length of the IQ examples.
:type signal: array_like or utils.data.Recording
:type signal: array_like or ria_toolkit_oss.datatypes.Recording
:param up: The upsampling factor. Default is 4.
:type up: int, optional
:param down: The downsampling factor. Default is 2.
@ -324,7 +324,7 @@ def resample(signal: ArrayLike | Recording, up: Optional[int] = 4, down: Optiona
:return: A numpy array which represents the resampled signal If `signal` is a `Recording`,
returns a `Recording object` with its `data` attribute containing the resampled array.
:rtype: np.ndarray or utils.data.Recording
:rtype: np.ndarray or ria_toolkit_oss.datatypes.Recording
>>> rec = Recording(data=[[1+1j, 2+2j]])
>>> new_rec = resample(rec, 2, 1)

View File

@ -1,9 +1,3 @@
"""
The Helpers module contains a bunch of helper functions, including array conversion utilities.
"""
__all__ = [
"bytes_to_samples",
]
from .bytes_to_samples import bytes_to_samples

View File

@ -0,0 +1,47 @@
"""
This module defines the tools required to mark class attributes as abstract.
Credit to krassowski: https://stackoverflow.com/questions/23831510/abstract-attribute-not-property.
"""
from abc import ABCMeta
class DummyAttribute:
    """Placeholder stand-in object used when no attribute value has been supplied yet."""
def abstract_attribute(obj=None):
    """
    Mark a class attribute (a plain attribute, not a property) as abstract. Intended for use with ``ABCMeta2``.

    Usage example:

    .. code-block:: python

        class AbstractFoo(metaclass=ABCMeta2):
            bar = abstract_attribute()
    """
    # When the caller supplies nothing, hang the flag on a throwaway placeholder object.
    target = DummyAttribute() if obj is None else obj
    # ABCMeta2 scans instances for this flag to decide whether instantiation is allowed.
    target.__is_abstract_attribute__ = True
    return target
class ABCMeta2(ABCMeta):
    """Metaclass that refuses to instantiate classes still carrying abstract attributes.

    An attribute counts as abstract when it carries a truthy ``__is_abstract_attribute__``
    flag, as set by :func:`abstract_attribute`.
    """

    def __call__(cls, *args, **kwargs):
        # Build the instance normally first, then inspect what the constructor left behind.
        instance = super().__call__(*args, **kwargs)
        missing = set()
        for name in dir(instance):
            if getattr(getattr(instance, name), "__is_abstract_attribute__", False):
                missing.add(name)
        if missing:
            raise NotImplementedError(
                "Cannot instantiate abstract class {} with missing attributes: {}".format(
                    cls.__name__, ", ".join(missing)
                )
            )
        return instance

View File

@ -0,0 +1,4 @@
"""
The package contains assorted plotting and report generation utilities to help visualize RIA components such as
recordings and radio datasets.
"""

View File

@ -4,18 +4,18 @@ import scipy.signal as signal
from plotly.graph_objs import Figure
from scipy.fft import fft, fftshift
from utils.data import Recording
from ria_toolkit_oss.datatypes import Recording
def spectrogram(rec: Recording, thumbnail: bool = False) -> Figure:
"""Create a spectrogram for the recording.
:param rec: Signal to plot.
:type rec: utils.data.Recording
:type rec: ria_toolkit_oss.datatypes.Recording
:param thumbnail: Whether to return a small thumbnail version or full plot.
:type thumbnail: bool
:return: Spectrogram, as a Plotly figure.
:return: Spectrogram, as a Plotly Figure.
"""
complex_signal = rec.data[0]
sample_rate = int(rec.metadata.get("sample_rate", 1))
@ -95,9 +95,9 @@ def iq_time_series(rec: Recording) -> Figure:
"""Create a time series plot of the real and imaginary parts of signal.
:param rec: Signal to plot.
:type rec: utils.data.Recording
:type rec: ria_toolkit_oss.datatypes.Recording
:return: Time series plot as a Plotly figure.
:return: Time series plot, as a Plotly Figure.
"""
complex_signal = rec.data[0]
sample_rate = int(rec.metadata.get("sample_rate", 1))
@ -125,9 +125,9 @@ def frequency_spectrum(rec: Recording) -> Figure:
"""Create a frequency spectrum plot from the recording.
:param rec: Input signal to plot.
:type rec: utils.data.Recording
:type rec: ria_toolkit_oss.datatypes.Recording
:return: Frequency spectrum as a Plotly figure.
:return: Frequency spectrum, as a Plotly Figure.
"""
complex_signal = rec.data[0]
center_frequency = int(rec.metadata.get("center_frequency", 0))
@ -160,15 +160,14 @@ def constellation(rec: Recording) -> Figure:
"""Create a constellation plot from the recording.
:param rec: Input signal to plot.
:type rec: utils.data.Recording
:type rec: ria_toolkit_oss.datatypes.Recording
:return: Constellation as a Plotly figure.
:return: Constellation, as a Plotly Figure.
"""
complex_signal = rec.data[0]
# Downsample the IQ samples to a target number of points
# This reduces the amount of data plotted, improving performance and interactivity
# without losing significant detail in the constellation visualization.
# Downsample the IQ samples to a target number of points. This reduces the amount of data plotted,
# improving performance and interactivity without losing significant detail in the constellation visualization.
target_number_of_points = 5000
step = max(1, len(complex_signal) // target_number_of_points)
i_ds = complex_signal.real[::step]