Add llama_finetune_boolq_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=500_seed=123 LoRA model 3cd6fc7 verified mciccone committed on Jun 10
Add llama_finetune_boolq_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=500_seed=123 LoRA model be69d11 verified mciccone committed on Jun 10
Add llama_finetune_boolq_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=100_seed=123 LoRA model 7c6b886 verified mciccone committed on Jun 10
Add llama_finetune_boolq_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=100_seed=123 LoRA model 859a6ea verified mciccone committed on Jun 10
Add llama_finetune_boolq_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=500_seed=123 LoRA model a740625 verified mciccone committed on Jun 10
Add llama_finetune_boolq_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=100_seed=123 LoRA model 29481cf verified mciccone committed on Jun 10