modrec-workflow/.riahub/workflows/workflow.yaml

name: RIA Hub Workflow Demo
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
jobs:
  ria-demo:
    runs-on: ubuntu-latest-2080
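    # "ubuntu-latest-2080" is assumed to be a GPU-equipped runner label (e.g. an RTX 2080 host)
    # registered with this RIA Hub instance; adjust to a runner label available in your environment.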
    steps:
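      # Sanity check: report whether an NVIDIA GPU is visible to the runner before starting the pipeline.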
      - name: Print GPU information
        run: |
          if command -v nvidia-smi &> /dev/null; then
            echo "✅ NVIDIA GPU is available"
            nvidia-smi
          else
            echo "⚠️ No NVIDIA GPU found"
          fi
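      # Fetch the repository (including Git LFS objects) and set up Python 3.10 with the project dependencies.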
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          lfs: true
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
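      # Step 1: generate recordings into data/recordings (data_gen.py writes to the directory given by --output-dir),
      # then publish them as a workflow artifact.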
      - name: 1. Generate Recordings
        run: |
          mkdir -p data/recordings
          PYTHONPATH=. python scripts/dataset_building/data_gen.py --output-dir data/recordings
          echo "recordings produced successfully"
      - name: Upload Recordings
        uses: actions/upload-artifact@v3
        with:
          name: recordings
          path: data/recordings/**
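      # Step 2: build the HDF5 dataset; produce_dataset.py is assumed to read the recordings generated above
      # and to write its output under data/dataset.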
      - name: 2. Build HDF5 Dataset
        run: |
          mkdir -p data/dataset
          PYTHONPATH=. python scripts/dataset_building/produce_dataset.py
          echo "datasets produced successfully"
        shell: bash
      - name: Upload Dataset Artifacts
        uses: actions/upload-artifact@v3
        with:
          name: dataset
          path: data/dataset/**
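      # Step 3: train the model and write checkpoints to checkpoint_files/. The NO_NNPACK / PYTORCH_NO_NNPACK
      # variables are assumed to disable PyTorch's NNPACK backend on this runner.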
      - name: 3. Train Model
        env:
          NO_NNPACK: 1
          PYTORCH_NO_NNPACK: 1
        run: |
          mkdir -p checkpoint_files
          PYTHONPATH=. python scripts/training/train.py
          echo "model trained successfully"
      - name: Upload Checkpoints
        uses: actions/upload-artifact@v3
        with:
          name: checkpoints
          path: checkpoint_files/*
      - name: List checkpoint directory
        run: ls -lh checkpoint_files
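      # Step 4: export the trained model to ONNX. MKL_DISABLE_FAST_MM=1 is assumed to work around an
      # MKL memory-manager issue during export.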
      - name: 4. Convert to ONNX file
        run: |
          mkdir -p onnx_files
          MKL_DISABLE_FAST_MM=1 PYTHONPATH=. python onnx_scripts/convert_to_onnx.py
          echo "ONNX model exported successfully"
      - name: Upload ONNX file
        uses: actions/upload-artifact@v3
        with:
          name: ria-demo-onnx
          path: onnx_files/inference_recognition_model.onnx
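      # Step 5: run the ONNX model with onnxruntime profiling enabled; the run is expected to emit
      # onnxruntime_profile_*.json traces, which are uploaded below.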
      - name: 5. Profile ONNX model
        run: |
          PYTHONPATH=. python onnx_scripts/profile_onnx.py
      - name: Upload JSON profiling data
        uses: actions/upload-artifact@v3
        with:
          name: profile-data
          path: '**/onnxruntime_profile_*.json'
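      # Step 6: convert the ONNX model to the ORT format for deployment with onnxruntime. Note the absolute
      # /workspace/... input path, which assumes the default workspace layout of the RIA Hub runner.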
      - name: 6. Convert to ORT file
        run: |
          python -m onnxruntime.tools.convert_onnx_models_to_ort \
            /workspace/qoherent/modrec-workflow/onnx_files/inference_recognition_model.onnx \
            --output_dir ort_files \
            --optimization_style Fixed \
            --target_platform amd64
      - name: Upload ORT file
        uses: actions/upload-artifact@v3
        with:
          name: ria-demo-ort
          path: ort_files/inference_recognition_model.ort