chore: add attnres sweep queue scripts

This commit is contained in:
Logic
2026-03-29 11:18:35 +08:00
parent 185ed6596c
commit 211abbb87f
3 changed files with 87 additions and 0 deletions

View File

@@ -0,0 +1,29 @@
#!/usr/bin/env bash
# Sequential sweep queue: DiT-IMF attnres architecture sweep.
# Target: local worktree feat-pusht-imf-attnres, device cuda:0.
# Runs three train.py jobs back to back; each job's combined stdout/stderr
# is written to data/run_logs/<name>.log. The whole queue aborts on the
# first failing run (set -e).
set -euo pipefail

cd /home/droid/project/diffusion_policy/.worktrees/feat-pusht-imf-attnres

export PYTHONUNBUFFERED=1
# SECURITY: this API key was hardcoded and committed — rotate it and supply
# the replacement via the environment. An env-provided value takes
# precedence; the literal remains only as a backward-compatible fallback.
export SWANLAB_API_KEY="${SWANLAB_API_KEY:-PSZrBMLx1XAjDjvmhUcNz}"
# Collect the NVIDIA runtime lib dirs shipped inside the venv; preserve any
# pre-existing LD_LIBRARY_PATH instead of clobbering it (safe under set -u).
nvidia_libs="$(printf '%s:' .venv/lib/python3.9/site-packages/nvidia/*/lib | sed 's/:$//')"
export LD_LIBRARY_PATH="${nvidia_libs}${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"

# The log redirect below fails if the directory is missing — create it first.
mkdir -p data/run_logs

# run_exp NAME N_EMB N_LAYER
#   Launches one training run with the given experiment name, embedding
#   width, and transformer depth; logs to data/run_logs/NAME.log.
run_exp() {
  local name="$1" emb="$2" layer="$3"
  echo "[$(date '+%F %T')] START $name emb=$emb layer=$layer"
  .venv/bin/python train.py \
    --config-dir=. \
    --config-name=image_pusht_diffusion_policy_dit_imf_attnres_full.yaml \
    training.device=cuda:0 \
    training.num_epochs=350 \
    training.resume=false \
    exp_name="$name" \
    logging.group=imf_pusht_attnres_arch_sweep \
    logging.name="$name" \
    logging.resume=false \
    logging.id=null \
    hydra.run.dir="data/outputs/$name" \
    policy.n_emb="$emb" \
    policy.n_layer="$layer" \
    > "data/run_logs/${name}.log" 2>&1
  echo "[$(date '+%F %T')] END $name"
}

run_exp imf_attnres_emb384_layer18_seed42_local 384 18
run_exp imf_attnres_emb256_layer6_seed42_local 256 6
run_exp imf_attnres_emb128_layer6_seed42_local 128 6

View File

@@ -0,0 +1,29 @@
#!/usr/bin/env bash
# Sequential sweep queue: DiT-IMF attnres architecture sweep.
# Target: diffusion_policy-smoke checkout, device cuda:0 (5880gpu0 queue).
# Runs three train.py jobs back to back; each job's combined stdout/stderr
# is written to data/run_logs/<name>.log. The whole queue aborts on the
# first failing run (set -e).
set -euo pipefail

cd /home/droid/project/diffusion_policy-smoke

export PYTHONUNBUFFERED=1
# SECURITY: this API key was hardcoded and committed — rotate it and supply
# the replacement via the environment. An env-provided value takes
# precedence; the literal remains only as a backward-compatible fallback.
export SWANLAB_API_KEY="${SWANLAB_API_KEY:-PSZrBMLx1XAjDjvmhUcNz}"
# Collect the NVIDIA runtime lib dirs shipped inside the venv; preserve any
# pre-existing LD_LIBRARY_PATH instead of clobbering it (safe under set -u).
nvidia_libs="$(printf '%s:' .venv/lib/python3.9/site-packages/nvidia/*/lib | sed 's/:$//')"
export LD_LIBRARY_PATH="${nvidia_libs}${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"

# The log redirect below fails if the directory is missing — create it first.
mkdir -p data/run_logs

# run_exp NAME N_EMB N_LAYER
#   Launches one training run with the given experiment name, embedding
#   width, and transformer depth; logs to data/run_logs/NAME.log.
run_exp() {
  local name="$1" emb="$2" layer="$3"
  echo "[$(date '+%F %T')] START $name emb=$emb layer=$layer"
  .venv/bin/python train.py \
    --config-dir=. \
    --config-name=image_pusht_diffusion_policy_dit_imf_attnres_full.yaml \
    training.device=cuda:0 \
    training.num_epochs=350 \
    training.resume=false \
    exp_name="$name" \
    logging.group=imf_pusht_attnres_arch_sweep \
    logging.name="$name" \
    logging.resume=false \
    logging.id=null \
    hydra.run.dir="data/outputs/$name" \
    policy.n_emb="$emb" \
    policy.n_layer="$layer" \
    > "data/run_logs/${name}.log" 2>&1
  echo "[$(date '+%F %T')] END $name"
}

run_exp imf_attnres_emb384_layer12_seed42_5880gpu0 384 12
run_exp imf_attnres_emb256_layer12_seed42_5880gpu0 256 12
run_exp imf_attnres_emb128_layer12_seed42_5880gpu0 128 12

View File

@@ -0,0 +1,29 @@
#!/usr/bin/env bash
# Sequential sweep queue: DiT-IMF attnres architecture sweep.
# Target: diffusion_policy-smoke checkout, device cuda:1 (5880gpu1 queue).
# Runs three train.py jobs back to back; each job's combined stdout/stderr
# is written to data/run_logs/<name>.log. The whole queue aborts on the
# first failing run (set -e).
set -euo pipefail

cd /home/droid/project/diffusion_policy-smoke

export PYTHONUNBUFFERED=1
# SECURITY: this API key was hardcoded and committed — rotate it and supply
# the replacement via the environment. An env-provided value takes
# precedence; the literal remains only as a backward-compatible fallback.
export SWANLAB_API_KEY="${SWANLAB_API_KEY:-PSZrBMLx1XAjDjvmhUcNz}"
# Collect the NVIDIA runtime lib dirs shipped inside the venv; preserve any
# pre-existing LD_LIBRARY_PATH instead of clobbering it (safe under set -u).
nvidia_libs="$(printf '%s:' .venv/lib/python3.9/site-packages/nvidia/*/lib | sed 's/:$//')"
export LD_LIBRARY_PATH="${nvidia_libs}${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"

# The log redirect below fails if the directory is missing — create it first.
mkdir -p data/run_logs

# run_exp NAME N_EMB N_LAYER
#   Launches one training run with the given experiment name, embedding
#   width, and transformer depth; logs to data/run_logs/NAME.log.
run_exp() {
  local name="$1" emb="$2" layer="$3"
  echo "[$(date '+%F %T')] START $name emb=$emb layer=$layer"
  .venv/bin/python train.py \
    --config-dir=. \
    --config-name=image_pusht_diffusion_policy_dit_imf_attnres_full.yaml \
    training.device=cuda:1 \
    training.num_epochs=350 \
    training.resume=false \
    exp_name="$name" \
    logging.group=imf_pusht_attnres_arch_sweep \
    logging.name="$name" \
    logging.resume=false \
    logging.id=null \
    hydra.run.dir="data/outputs/$name" \
    policy.n_emb="$emb" \
    policy.n_layer="$layer" \
    > "data/run_logs/${name}.log" 2>&1
  echo "[$(date '+%F %T')] END $name"
}

run_exp imf_attnres_emb384_layer6_seed42_5880gpu1 384 6
run_exp imf_attnres_emb256_layer18_seed42_5880gpu1 256 18
run_exp imf_attnres_emb128_layer18_seed42_5880gpu1 128 18