From 6139a848c3b9d5d6c1322cf8acadf2baacee9e8a Mon Sep 17 00:00:00 2001 From: YurenHao0426 Date: Sun, 5 Apr 2026 16:20:20 -0500 Subject: Add Prompt Tuning and Prefix Tuning baselines - peft_baseline.py: Add PromptTuningConfig (L=5,10,20) and PrefixTuningConfig (L=5,10) - run_all_methods.py: Add 5 new methods to dispatch (prompt_tuning_5/10/20, prefix_tuning_5/10) with per-method directory output structure Prompt Tuning: params = L * H (e.g. 10*1536 = 15360 params = 30KB) Prefix Tuning: params = L * num_layers * 2 * H (much larger) Co-Authored-By: Claude Opus 4.6 (1M context) --- scripts/run_all_methods.py | 8 ++++++++ 1 file changed, 8 insertions(+) (limited to 'scripts') diff --git a/scripts/run_all_methods.py b/scripts/run_all_methods.py index c5eb523..1502ff3 100644 --- a/scripts/run_all_methods.py +++ b/scripts/run_all_methods.py @@ -27,6 +27,7 @@ from adapt.cache_hidden import cache_support_hidden_states from adapt.fit_theta import fit_theta from baselines.peft_baseline import ( PEFTBaseline, get_lora_config, get_tiny_lora_config, get_vera_config, + get_prompt_tuning_config, get_prefix_tuning_config, ) from baselines.bm25_top1 import bm25_select_top1 from baselines.dense_retrieval import DenseRetriever @@ -38,6 +39,8 @@ ALL_METHODS = [ 'base', 'uph', 'prompt_all_k', 'bm25_top1', 'dense_top1', 'profile_based', 'lora', 'tiny_lora', 'vera', + 'prompt_tuning_5', 'prompt_tuning_10', 'prompt_tuning_20', + 'prefix_tuning_5', 'prefix_tuning_10', ] @@ -97,6 +100,11 @@ class MethodRunner: 'lora': lambda *a: self._run_peft(*a, config=get_lora_config(rank=8), lr=1e-4, desc='LoRA r=8'), 'tiny_lora': lambda *a: self._run_peft(*a, config=get_tiny_lora_config(rank=1), lr=1e-4, desc='Tiny LoRA r=1'), 'vera': lambda *a: self._run_peft(*a, config=get_vera_config(rank=256), lr=1e-3, desc='VeRA r=256'), + 'prompt_tuning_5': lambda *a: self._run_peft(*a, config=get_prompt_tuning_config(5), lr=3e-1, desc='PromptTuning L=5'), + 'prompt_tuning_10': lambda *a: self._run_peft(*a, config=get_prompt_tuning_config(10), lr=3e-1, desc='PromptTuning L=10'), + 'prompt_tuning_20': lambda *a: self._run_peft(*a, config=get_prompt_tuning_config(20), lr=3e-1, desc='PromptTuning L=20'), + 'prefix_tuning_5': lambda *a: self._run_peft(*a, config=get_prefix_tuning_config(5), lr=1e-2, desc='PrefixTuning L=5'), + 'prefix_tuning_10': lambda *a: self._run_peft(*a, config=get_prefix_tuning_config(10), lr=1e-2, desc='PrefixTuning L=10'), } if method_name not in dispatch: -- cgit v1.2.3