general:
  # Run mode. Options are 'prod' or 'dev'.
  run_mode: prod

dataset:
  # Where to read the recordings from to produce the dataset.
  input_dir: data/recordings

  # Number of slices to split each recording into.
  num_slices: 8

  # Train/validation split between the two datasets.
  train_split: 0.8
  val_split: 0.2
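  # The two fractions sum to 1.0 here (0.8 + 0.2); whether the split code
  # requires that is an assumption, not stated in this file.
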
  # Seed used to initialize the random number generator.
  seed: 25

  # Modulation types to include in the dataset.
  modulation_types: [bpsk, qpsk, qam16, qam64]
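  # For reference: bpsk/qpsk are binary/quadrature phase-shift keying;
  # qam16/qam64 are 16- and 64-point quadrature amplitude modulation.
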
  # Where to write the generated datasets.
  output_dir: data/dataset

training:
  # Number of training samples processed together before the model updates its weights.
  batch_size: 256

  # Number of passes through the dataset during training.
  epochs: 5

  # How much the weights are updated after every batch during training.
  # Suggested range for fine-tuning: 1e-6 to 1e-4.
  learning_rate: 1e-4
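  # Note: under YAML 1.1 resolvers (e.g. PyYAML's default loader) 1e-4 is read
  # as a string, not a float; write 1.0e-4 if your loader needs a numeric value.
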
  # Whether to run training on a GPU.
  use_gpu: true

inference:
  # Number of classes to classify.
  num_classes: 4
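  # Presumably this matches the number of entries in dataset.modulation_types
  # above (4); that link is an assumption, not enforced by this file.
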
app:
  # Output directory for the built application.
  build_dir: dist