File tree: 1 file changed (+36, −0 lines).
llm_on_ray/finetune/models: 1 file changed (+36, −0 lines).
Original file line number / diff line number / diff line change
---
# Fine-tuning configuration: Mixtral-8x7B-Instruct with LoRA adapters,
# trained on CPU via DDP (distributed data parallel).
General:
  base_model: mistralai/Mixtral-8x7B-Instruct-v0.1
  # false: not a GPT-style base model (tokenizer/collator handling differs).
  gpt_base_model: false
  output_dir: /tmp/llm-ray/output
  checkpoint_dir: /tmp/llm-ray/checkpoint
  config:
    # Do not execute model-repo code from the Hub.
    trust_remote_code: false
    # null: rely on cached credentials / anonymous access.
    use_auth_token: null
  # LoRA adapter hyperparameters (PEFT-style fields).
  lora_config:
    task_type: CAUSAL_LM
    r: 8
    lora_alpha: 32
    lora_dropout: 0.1
    # Attention projections to adapt.
    target_modules:
      - q_proj
      - v_proj
  enable_gradient_checkpointing: false

Dataset:
  train_file: examples/data/sample_finetune_data_small.jsonl
  # null: no separate validation file; a split is carved from train_file.
  validation_file: null
  # Percent of training data held out for validation when no file is given.
  validation_split_percentage: 5

Training:
  optimizer: AdamW
  batch_size: 2
  epochs: 3
  learning_rate: 1.0e-05
  lr_scheduler: linear
  weight_decay: 0.0
  mixed_precision: bf16
  device: CPU
  num_training_workers: 2
  # Resources requested per Ray worker.
  resources_per_worker:
    CPU: 2
  accelerate_mode: CPU_DDP
  gradient_accumulation_steps: 1
  logging_steps: 10
You can’t perform that action at this time.
0 commit comments