modrec-workflow/.riahub/workflows/workflow.yaml

131 lines
3.3 KiB
YAML
Raw Normal View History

2025-05-14 15:40:08 -04:00
---
# CI pipeline: generates synthetic recordings, builds an HDF5 dataset,
# trains a model, then exports/profiles ONNX and ORT artifacts.
name: RIA Hub Workflow Demo

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

jobs:
  ria-demo:
    # Custom runner label — presumably a GPU (RTX 2080) host; verify against runner pool.
    runs-on: ubuntu-latest-2080
    env:
      RIAGIT_USERNAME: ${{ secrets.USERNAME }}
      RIAGIT_TOKEN: ${{ secrets.TOKEN }}
    steps:
      # Diagnostic only — the job proceeds even when no GPU is present.
      - name: Print GPU information
        run: |
          if command -v nvidia-smi &> /dev/null; then
            echo "✅ NVIDIA GPU is available"
            nvidia-smi
          else
            echo "⚠️ No NVIDIA GPU found"
          fi

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          lfs: true

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          # Quoted — a bare 3.10 would be parsed as the float 3.1.
          python-version: "3.10"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: 1. Generate Recordings
        run: |
          mkdir -p data/recordings
          PYTHONPATH=. python scripts/dataset_building/data_gen.py --output-dir data/recordings
          echo "recordings produced successfully"

      # NOTE(review): upload-artifact@v3 is deprecated upstream (GitHub);
      # confirm whether this RIA Hub runner supports v4 before bumping.
      - name: ⬆️ Upload recordings
        uses: actions/upload-artifact@v3
        with:
          name: recordings
          path: data/recordings/**

      - name: 2. Build HDF5 Dataset
        run: |
          mkdir -p data/dataset
          PYTHONPATH=. python scripts/dataset_building/produce_dataset.py
          echo "datasets produced successfully"
        shell: bash

      - name: 📤 Upload Dataset
        uses: actions/upload-artifact@v3
        with:
          name: dataset
          path: data/dataset/**

      - name: 3. Train Model
        env:
          # Quoted strings: env var values are strings; disables NNPACK backend.
          NO_NNPACK: "1"
          PYTORCH_NO_NNPACK: "1"
        run: |
          mkdir -p checkpoint_files
          PYTHONPATH=. python scripts/training/train.py
          echo "training model"

      - name: 4. Plot Model
        env:
          NO_NNPACK: "1"
          PYTORCH_NO_NNPACK: "1"
        run: |
          PYTHONPATH=. python scripts/training/plot_data.py

      - name: Upload Checkpoints
        uses: actions/upload-artifact@v3
        with:
          name: checkpoints
          path: checkpoint_files/*

      # Renumbered 4 -> 5: the original duplicated step number 4.
      - name: 5. Convert to ONNX file
        env:
          NO_NNPACK: "1"
          PYTORCH_NO_NNPACK: "1"
        run: |
          mkdir -p onnx_files
          MKL_DISABLE_FAST_MM=1 PYTHONPATH=. python scripts/onnx/convert_to_onnx.py
          echo "building inference app"

      - name: Upload ONNX file
        uses: actions/upload-artifact@v3
        with:
          name: onnx-file
          path: onnx_files/inference_recognition_model.onnx

      # Renamed from "List checkpoint directory" — it lists onnx_files, not checkpoints.
      - name: List ONNX directory
        run: ls -lh onnx_files

      - name: 6. Profile ONNX model
        run: |
          PYTHONPATH=. python scripts/onnx/profile_onnx.py

      - name: Upload JSON profiling data
        uses: actions/upload-artifact@v3
        with:
          name: profile-data
          path: '**/onnxruntime_profile_*.json'

      - name: 7. Convert to ORT file
        run: |
          PYTHONPATH=. python scripts/ort/convert_to_ort.py

      - name: Upload ORT file
        uses: actions/upload-artifact@v3
        with:
          name: ort-file
          path: ort_files/inference_recognition_model.ort