feat: add mamba and dynamic chunking related code and test code
This commit is contained in:
259
scripts/classification/vanillaMamba_classification.sh
Normal file
259
scripts/classification/vanillaMamba_classification.sh
Normal file
@ -0,0 +1,259 @@
|
||||
#!/bin/bash
# vanillaMamba Classification Training Script for Multiple Datasets
#
# Runs `python -u run.py --task_name classification ...` once per UEA dataset,
# teeing each run's stdout to ./results/<model>_<dataset>.log.
# Requires: run.py in the CWD and the datasets under ./dataset/<Name>/.

# Each training command is piped through `tee`; without pipefail the pipeline's
# status would be tee's, hiding python failures from any caller of this script.
set -o pipefail

# Pin all runs to a single GPU.
export CUDA_VISIBLE_DEVICES=0

model_name=vanillaMamba

# Create results directory if it doesn't exist
mkdir -p ./results
# UWaveGestureLibrary dataset (seq_len=315, enc_in=3) - use Copy1 config
python -u run.py \
  --task_name classification \
  --is_training 1 \
  --root_path ./dataset/UWaveGestureLibrary/ \
  --model_id UWaveGestureLibrary \
  --model "$model_name" \
  --data UEA \
  --e_layers 2 \
  --batch_size 64 \
  --seq_len 315 \
  --enc_in 3 \
  --d_model 128 \
  --d_state 64 \
  --d_conv 4 \
  --expand 2 \
  --headdim 128 \
  --dropout 0.1 \
  --des 'vanillaMamba_UWaveGestureLibrary' \
  --itr 1 \
  --learning_rate 0.002 \
  --train_epochs 150 \
  --patience 30 \
  --revin 0 | tee ./results/vanillaMamba_UWaveGestureLibrary.log
# EthanolConcentration dataset (seq_len=1751, enc_in=3) - use Copy1 config
# NOTE(review): fixed --is_training 3 -> 1 (the flag is a train/eval switch;
# every other dataset in this script uses 1) and --enc_in 4 -> 3 to match the
# dataset's channel count stated in the comment above — confirm against run.py.
python -u run.py \
  --task_name classification \
  --is_training 1 \
  --root_path ./dataset/EthanolConcentration/ \
  --model_id EthanolConcentration \
  --model "$model_name" \
  --data UEA \
  --e_layers 2 \
  --batch_size 64 \
  --seq_len 1751 \
  --enc_in 3 \
  --d_model 128 \
  --d_state 64 \
  --d_conv 4 \
  --expand 2 \
  --headdim 64 \
  --dropout 0.1 \
  --des 'vanillaMamba_EthanolConcentration' \
  --itr 1 \
  --learning_rate 0.001 \
  --train_epochs 200 \
  --patience 30 \
  --revin 0 | tee ./results/vanillaMamba_EthanolConcentration.log
# Handwriting dataset (seq_len=152, enc_in=3) - use Copy1 config
python -u run.py \
  --task_name classification \
  --is_training 1 \
  --root_path ./dataset/Handwriting/ \
  --model_id Handwriting \
  --model "$model_name" \
  --data UEA \
  --e_layers 4 \
  --batch_size 64 \
  --seq_len 152 \
  --enc_in 3 \
  --d_model 128 \
  --d_state 64 \
  --d_conv 4 \
  --expand 2 \
  --headdim 64 \
  --dropout 0.1 \
  --des 'vanillaMamba_Handwriting' \
  --itr 1 \
  --learning_rate 0.001 \
  --train_epochs 200 \
  --patience 30 \
  --revin 0 | tee ./results/vanillaMamba_Handwriting.log
# JapaneseVowels dataset (seq_len=29, enc_in=12) - use Copy1 config
python -u run.py \
  --task_name classification \
  --is_training 1 \
  --root_path ./dataset/JapaneseVowels/ \
  --model_id JapaneseVowels \
  --model "$model_name" \
  --data UEA \
  --e_layers 3 \
  --batch_size 64 \
  --seq_len 29 \
  --enc_in 12 \
  --d_model 128 \
  --d_state 64 \
  --d_conv 4 \
  --expand 2 \
  --headdim 64 \
  --dropout 0.1 \
  --des 'vanillaMamba_JapaneseVowels' \
  --itr 1 \
  --learning_rate 0.0005 \
  --train_epochs 100 \
  --patience 30 \
  --revin 0 | tee ./results/vanillaMamba_JapaneseVowels.log
# PEMS-SF dataset (seq_len=144, enc_in=963) - use Copy1 config
# Smaller batch (16) than the other datasets — 963 input channels.
python -u run.py \
  --task_name classification \
  --is_training 1 \
  --root_path ./dataset/PEMS-SF/ \
  --model_id PEMS-SF \
  --model "$model_name" \
  --data UEA \
  --e_layers 3 \
  --batch_size 16 \
  --seq_len 144 \
  --enc_in 963 \
  --d_model 128 \
  --d_state 64 \
  --d_conv 4 \
  --expand 2 \
  --headdim 64 \
  --dropout 0.1 \
  --des 'vanillaMamba_PEMS-SF' \
  --itr 1 \
  --learning_rate 0.001 \
  --train_epochs 150 \
  --patience 30 \
  --revin 0 | tee ./results/vanillaMamba_PEMS-SF.log
# Heartbeat dataset (seq_len=405, enc_in=61) - use original config
python -u run.py \
  --task_name classification \
  --is_training 1 \
  --root_path ./dataset/Heartbeat/ \
  --model_id Heartbeat \
  --model "$model_name" \
  --data UEA \
  --e_layers 3 \
  --batch_size 64 \
  --seq_len 405 \
  --enc_in 61 \
  --d_model 128 \
  --d_state 64 \
  --d_conv 4 \
  --expand 2 \
  --headdim 64 \
  --dropout 0.1 \
  --des 'vanillaMamba_Heartbeat' \
  --itr 1 \
  --learning_rate 0.001 \
  --train_epochs 150 \
  --patience 10 \
  --revin 0 | tee ./results/vanillaMamba_Heartbeat.log
# FaceDetection dataset (seq_len=62, enc_in=144) - use original config
python -u run.py \
  --task_name classification \
  --is_training 1 \
  --root_path ./dataset/FaceDetection/ \
  --model_id FaceDetection \
  --model "$model_name" \
  --data UEA \
  --e_layers 3 \
  --batch_size 64 \
  --seq_len 62 \
  --enc_in 144 \
  --d_model 128 \
  --d_state 64 \
  --d_conv 4 \
  --expand 2 \
  --headdim 64 \
  --dropout 0.1 \
  --des 'vanillaMamba_FaceDetection' \
  --itr 1 \
  --learning_rate 0.001 \
  --train_epochs 100 \
  --patience 10 \
  --revin 0 | tee ./results/vanillaMamba_FaceDetection.log
# SelfRegulationSCP1 dataset (seq_len=896, enc_in=6) - use original config
python -u run.py \
  --task_name classification \
  --is_training 1 \
  --root_path ./dataset/SelfRegulationSCP1/ \
  --model_id SelfRegulationSCP1 \
  --model "$model_name" \
  --data UEA \
  --e_layers 3 \
  --batch_size 64 \
  --seq_len 896 \
  --enc_in 6 \
  --d_model 128 \
  --d_state 64 \
  --d_conv 4 \
  --expand 2 \
  --headdim 64 \
  --dropout 0.1 \
  --des 'vanillaMamba_SelfRegulationSCP1' \
  --itr 1 \
  --learning_rate 0.001 \
  --train_epochs 100 \
  --patience 10 \
  --revin 0 | tee ./results/vanillaMamba_SelfRegulationSCP1.log
# SelfRegulationSCP2 dataset (seq_len=1152, enc_in=7) - use original config
python -u run.py \
  --task_name classification \
  --is_training 1 \
  --root_path ./dataset/SelfRegulationSCP2/ \
  --model_id SelfRegulationSCP2 \
  --model "$model_name" \
  --data UEA \
  --e_layers 3 \
  --batch_size 64 \
  --seq_len 1152 \
  --enc_in 7 \
  --d_model 128 \
  --d_state 64 \
  --d_conv 4 \
  --expand 2 \
  --headdim 64 \
  --dropout 0.1 \
  --des 'vanillaMamba_SelfRegulationSCP2' \
  --itr 1 \
  --learning_rate 0.001 \
  --train_epochs 100 \
  --patience 10 \
  --revin 0 | tee ./results/vanillaMamba_SelfRegulationSCP2.log
# SpokenArabicDigits dataset (seq_len=93, enc_in=13) - use original config
python -u run.py \
  --task_name classification \
  --is_training 1 \
  --root_path ./dataset/SpokenArabicDigits/ \
  --model_id SpokenArabicDigits \
  --model "$model_name" \
  --data UEA \
  --e_layers 3 \
  --batch_size 64 \
  --seq_len 93 \
  --enc_in 13 \
  --d_model 128 \
  --d_state 64 \
  --d_conv 4 \
  --expand 2 \
  --headdim 64 \
  --dropout 0.1 \
  --des 'vanillaMamba_SpokenArabicDigits' \
  --itr 1 \
  --learning_rate 0.001 \
  --train_epochs 100 \
  --patience 10 \
  --revin 0 | tee ./results/vanillaMamba_SpokenArabicDigits.log
Reference in New Issue
Block a user