# modrec-workflow/.riahub/workflows/workflow.yaml
---
# End-to-end modulation-recognition demo pipeline:
# generate RF recordings -> build HDF5 dataset -> train model -> plot ->
# export to ONNX -> profile -> convert to ORT. Each stage uploads its
# output as an artifact so intermediate results can be inspected.
name: Modulation Recognition Demo

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

jobs:
  ria-demo:
    # Custom runner label — a GPU-equipped (RTX 2080) Ubuntu runner.
    runs-on: ubuntu-latest-2080
    steps:
      # Diagnostic only: report whether the runner actually exposes a GPU.
      # Does not fail the job when no GPU is present.
      - name: Print GPU information
        run: |
          if command -v nvidia-smi &> /dev/null; then
            echo "✅ NVIDIA GPU is available"
            nvidia-smi
          else
            echo "⚠️ No NVIDIA GPU found"
          fi

      # LFS is required because recordings/model assets are stored via Git LFS.
      - name: Checkout project code
        uses: actions/checkout@v4
        with:
          lfs: true

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          # Quoted so YAML does not parse the version as the float 3.1.
          python-version: "3.10"

      # Installs the private `utils` package from the RIA Hub PyPI registry
      # (authenticated via repository secrets), then the project requirements.
      - name: Install dependencies (incl. RIA Hub utils)
        run: |
          set -e
          python -m pip install --upgrade pip
          echo "Trying to install utils from RIA Hub..."
          pip install \
            --index-url "https://${{ secrets.RIAHUB_USER }}:${{ secrets.RIAHUB_TOKEN }}@git.riahub.ai/api/packages/qoherent/pypi/simple/" \
            utils
          pip install -r requirements.txt

      - name: 1. Generate Recordings
        run: |
          mkdir -p data/recordings
          PYTHONPATH=. python scripts/dataset_manager/data_gen.py --output-dir data/recordings

      - name: 📦 Compress Recordings
        run: tar -czf recordings.tar.gz -C data/recordings .

      - name: ⬆️ Upload recordings
        # NOTE(review): upload-artifact@v3 is deprecated on github.com but is
        # the version supported by Gitea/RIA Hub runners — confirm before bumping.
        uses: actions/upload-artifact@v3
        with:
          name: recordings
          path: recordings.tar.gz

      - name: 2. Build HDF5 Dataset
        run: |
          mkdir -p data/dataset
          PYTHONPATH=. python scripts/dataset_manager/produce_dataset.py
        shell: bash

      - name: ⬆️ Upload Dataset
        uses: actions/upload-artifact@v3
        with:
          name: dataset
          path: data/dataset/**

      - name: 3. Train Model
        env:
          # Quoted: env values are strings; avoids YAML integer coercion.
          # Presumably these disable NNPACK to silence PyTorch CPU-backend
          # warnings on this runner — TODO confirm.
          NO_NNPACK: "1"
          PYTORCH_NO_NNPACK: "1"
        run: |
          mkdir -p checkpoint_files
          # stderr suppressed deliberately (framework warning noise);
          # the exit code still fails the step on a real error.
          PYTHONPATH=. python scripts/model_builder/train.py 2>/dev/null

      - name: 4. Plot Model
        env:
          NO_NNPACK: "1"
          PYTORCH_NO_NNPACK: "1"
        run: |
          PYTHONPATH=. python scripts/model_builder/plot_data.py 2>/dev/null

      - name: ⬆️ Upload Checkpoints
        uses: actions/upload-artifact@v3
        with:
          name: checkpoints
          path: checkpoint_files/*

      - name: 5. Export model to ONNX graph
        env:
          NO_NNPACK: "1"
          PYTORCH_NO_NNPACK: "1"
        run: |
          mkdir -p onnx_files
          MKL_DISABLE_FAST_MM=1 PYTHONPATH=. python scripts/application_packager/convert_to_onnx.py 2>/dev/null

      - name: ⬆️ Upload ONNX file
        uses: actions/upload-artifact@v3
        with:
          name: onnx-file
          path: onnx_files/inference_recognition_model.onnx

      - name: 6. Profile ONNX model
        run: |
          PYTHONPATH=. python scripts/application_packager/profile_onnx.py

      - name: ⬆️ Upload JSON trace
        uses: actions/upload-artifact@v3
        with:
          name: profile-data
          # Quoted: an unquoted leading * would be parsed as a YAML alias.
          path: "**/onnxruntime_profile_*.json"

      - name: 7. Convert ONNX graph to an ORT file
        run: |
          PYTHONPATH=. python scripts/application_packager/convert_to_ort.py

      - name: ⬆️ Upload ORT file
        uses: actions/upload-artifact@v3
        with:
          name: ort-file
          path: ort_files/inference_recognition_model.ort