# run-comparison.sh — forked from Qi-YOU/CEGE0049-GSS-Dassl-CoOp
# (GitHub page scaffolding and line-number gutter removed from this capture;
#  the original file is 200 lines / 165 loc, 6.53 KB. Script follows.)
#!/bin/bash
# Compare CLIP MHAdapter with baseline models (ZeroshotCLIP, Linear_Probe, CoOp, CLIP_Adapter).
# Runs each model on selected datasets with fixed configs, logs performance, and
# produces a summary for side-by-side evaluation.

# ======= Summary log setup =======
# For Linux users (uncomment these lines and comment the Windows ones)
results_root="/root/autodl-tmp/results"
mkdir -p "${results_root}/"
summary_file="${results_root}/comparison_summary.txt"
# For Windows users (uncomment these lines and comment the Linux ones)
# mkdir -p ../autodl-tmp/results/
# summary_file="../autodl-tmp/results/comparison_summary.txt"

# Start the summary file fresh with a header and the launch timestamp.
{
  echo "======== TRAINING SUMMARY ========"
  echo "Start time: $(date)"
  echo ""
} > "$summary_file"
#######################################
# Run one training/eval experiment and append its runtime to the summary log.
# Globals:
#   summary_file (read) - path that the timing entry is appended to
# Arguments:
#   $1 - trainer name (e.g. CoOp, CLIP_MHAdapter)
#   $2 - dataset name (CoOp/configs/datasets/<dataset>.yaml must exist)
#   $3 - config base name (configs/<config>.yaml, without extension)
#   $4 - loss name (optional; emitted as TRAINER.LOSS.NAME)
#   $5 - class weighting (optional; emitted as TRAINER.LOSS.CLASS_WEIGHTING)
#   $6 - blend ratio (optional; emitted as MODEL.BLEND_RATIO)
#   $7 - number of heads (optional; emitted as MODEL.NUM_HEADS)
#   $8 - random seed (default: 42)
#   $9 - extra whitespace-separated tokens appended to the command line,
#        e.g. "--eval-only" or "TRAINER.COOP.N_CTX 8"
# Outputs:
#   Progress and elapsed time on stdout; timing entry appended to $summary_file.
#######################################
function run_experiment() {
  local trainer=$1
  local dataset=$2
  local config=$3
  local loss=${4:-""}
  local class_weight=${5:-""}
  local blend_ratio=${6:-""}
  local num_heads=${7:-""}
  local seed=${8:-42}
  local extra_args=${9:-""}

  # Directory tag for the blend ratio: first dot stripped ("0.8" -> "08"),
  # or "NULL" when no blend ratio was given.
  local blend_tag="NULL"
  if [[ -n "$blend_ratio" ]]; then
    blend_tag=${blend_ratio/./}
  fi

  # --------------------------------------------
  # Set output directory path for Linux or Windows
  # IMPORTANT: Manually comment/uncomment the appropriate line below based on your OS.
  # --------------------------------------------
  # For Linux users (uncomment this line and comment the Windows one)
  local outdir="/root/autodl-tmp/results/${dataset}/${trainer}-clip-vitb16"
  # For Windows users (uncomment this line and comment the Linux one)
  # local outdir="../autodl-tmp/results/${dataset}/${trainer}-clip-vitb16"
  if [[ -n "$loss" ]]; then
    outdir+="-loss_${loss}"
  fi
  if [[ -n "$class_weight" ]]; then
    outdir+="-cw_${class_weight}"
  fi
  if [[ -n "$blend_ratio" ]]; then
    outdir+="-br_${blend_tag}"
  fi
  if [[ -n "$num_heads" ]]; then
    outdir+="-mh_${num_heads}"
  fi
  # NOTE(review): seed defaults to 42 and can never be empty here, so this
  # guard always fires; kept for symmetry with the other optional tags.
  if [[ -n "$seed" ]]; then
    outdir+="-sd_${seed}"
  fi

  # ---- Detect n_ctx from extra_args if present (so it shows in the dir name) ----
  if [[ "$extra_args" =~ TRAINER\.COOP\.N_CTX[[:space:]]+([0-9]+) ]]; then
    outdir+="-n_ctx_${BASH_REMATCH[1]}"
  fi

  # --------------------------------------------
  # Execute the experiment...
  # --------------------------------------------
  echo "Running dataset=${dataset} with trainer=${trainer}..."
  echo "=== Config: ${config} | Loss: ${loss:-none} | Class Weighting: ${class_weight:-none} | Blend Ratio: ${blend_ratio:-none} | Num Heads: ${num_heads:-none} | Seed: ${seed} ==="

  # Record start time (declaration split from assignment so a failing
  # command substitution is not masked by `local`'s own exit status).
  local start_time
  start_time=$(date +%s)

  # Build the command as an argv array instead of a string fed to `eval`:
  # every value is passed as its own argument and never re-parsed by the shell.
  local -a cmd=(
    python CoOp/train.py
    --trainer "${trainer}"
    --dataset-config-file "CoOp/configs/datasets/${dataset}.yaml"
    --config-file "configs/${config}.yaml"
    --output-dir "${outdir}"
    --seed "${seed}"
  )
  # Intentional word-splitting: extra_args carries whitespace-separated
  # tokens such as "--eval-only" or "TRAINER.COOP.N_CTX 8".
  # shellcheck disable=SC2206
  cmd+=(${extra_args})
  if [[ -n "$loss" ]]; then
    cmd+=(TRAINER.LOSS.NAME "${loss}")
  fi
  if [[ -n "$class_weight" ]]; then
    cmd+=(TRAINER.LOSS.CLASS_WEIGHTING "${class_weight}")
  fi
  if [[ -n "$blend_ratio" ]]; then
    cmd+=(MODEL.BLEND_RATIO "${blend_ratio}")
  fi
  if [[ -n "$num_heads" ]]; then
    cmd+=(MODEL.NUM_HEADS "${num_heads}")
  fi

  # Echo and execute the command
  echo ">> Running command:"
  echo "${cmd[*]}"
  "${cmd[@]}"

  # Record end time and calculate elapsed time
  local end_time
  end_time=$(date +%s)
  local elapsed=$((end_time - start_time))
  # Precompute hours, minutes, seconds to avoid redundant calculations
  local hours=$((elapsed / 3600))
  local minutes=$(((elapsed % 3600) / 60))
  local seconds=$((elapsed % 60))

  # Print to stdout
  printf "Elapsed time for %s (trainer=%s, loss=%s, weight=%s, br=%s, heads=%s): %02d:%02d:%02d\n\n" \
    "$dataset" "$trainer" "${loss:-none}" "${class_weight:-none}" "${blend_ratio:-none}" "${num_heads:-none}" \
    "$hours" "$minutes" "$seconds"

  # Append to summary file (without Params line)
  {
    echo "[$dataset] ${trainer}"
    printf "Elapsed: %02d:%02d:%02d\n" "$hours" "$minutes" "$seconds"
    echo ""
  } >> "$summary_file"
}
# ==================================================================== #
# Pre-defined pass-in arguments
datasets=(
  "glare" "lighting_condition" "pano_status" "platform"
  "quality" "reflection" "view_direction" "weather"
)
class_weights=("inverse" "uniform")
trainers_order=(
  "ZeroR_Trainer" "ZeroshotCLIP" "Linear_Probe"
  "CoOp" "CLIP_Adapter" "CLIP_MHAdapter"
)

# Dataset-specific optimal parameters for CLIP_MHAdapter, determined through
# comprehensive grid search analysis with best macro f1-score performance
# (via run-grid-search.sh). Each value is "loss class_weight blend_ratio num_heads".
declare -A mhadapter_params=(
  [glare]="ce uniform 0.8 4"
  [lighting_condition]="ce inverse 0.8 8"
  [pano_status]="ce uniform 0.2 4"
  [platform]="ce uniform 0.8 4"
  [quality]="ce inverse 0.2 16"
  [reflection]="ce inverse 0.8 8"
  [view_direction]="ce inverse 0.8 4"
  [weather]="ce uniform 0.2 8"
)
# Iterate through trainers over datasets in order
for dataset in "${datasets[@]}"; do
  for trainer in "${trainers_order[@]}"; do
    # Select config file base name based on trainer type:
    # CLIP_MHAdapter trains with AdamW, everything else with SGD.
    if [[ "$trainer" == "CLIP_MHAdapter" ]]; then
      config="vit_b16-adamw"
    else
      config="vit_b16-sgd"
    fi
    # Fixed random seed for all experiments
    seed=42

    # Handle different trainers differently
    case "$trainer" in
      "ZeroR_Trainer" | "ZeroshotCLIP")
        # Training-free baselines: evaluation only.
        run_experiment "$trainer" "$dataset" "$config" "" "" "" "" "$seed" "--eval-only"
        ;;
      "Linear_Probe" | "CLIP_Adapter")
        # Same sweep for both: cross-entropy under each class-weighting scheme.
        for cw in "${class_weights[@]}"; do
          run_experiment "$trainer" "$dataset" "$config" "ce" "$cw" "" "" "$seed"
        done
        ;;
      "CoOp")
        run_experiment "$trainer" "$dataset" "$config" "" "" "" "" "$seed" "TRAINER.COOP.N_CTX 8"
        ;;
      "CLIP_MHAdapter")
        # Split the space-separated tuned params "loss cw blend_ratio heads".
        # read -r -a splits on whitespace without the glob-expansion risk of
        # an unquoted array assignment (SC2206).
        read -r -a params <<< "${mhadapter_params[$dataset]}"
        run_experiment "$trainer" "$dataset" "$config" \
          "${params[0]}" "${params[1]}" "${params[2]}" "${params[3]}" "$seed"
        ;;
      *)
        # Diagnostics belong on stderr, not in the captured stdout log.
        echo "Warning: Unknown trainer name '$trainer'" >&2
        ;;
    esac
  done
done