Paul Bridger committed on
Commit 0341a51
1 Parent(s): 241a9d7
Files changed (2)
  1. meta_model_latest.pt +3 -0
  2. training_config.yml +76 -0
meta_model_latest.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42f82b2f7bc7f5252025921029a4201a2d169bceac1137b2853cc61c42def1d0
+ size 16060618198
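The commit stores only this Git LFS pointer; the roughly 16 GB of weights are fetched with `git lfs pull`. Below is a minimal inspection sketch, assuming the downloaded file is an ordinary torch.save checkpoint of tensors (as the Meta-format checkpointer referenced in training_config.yml would write); the flat state-dict layout is an assumption, not confirmed by the commit.

import torch

# Assumes `git lfs pull` has already replaced the pointer with the real
# ~16 GB checkpoint; map_location="cpu" avoids needing a GPU just to inspect it.
state_dict = torch.load("meta_model_latest.pt", map_location="cpu")

# List a few parameter names, shapes, and dtypes as a sanity check.
for name, value in list(state_dict.items())[:5]:
    if torch.is_tensor(value):
        print(name, tuple(value.shape), value.dtype)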
training_config.yml ADDED
@@ -0,0 +1,76 @@
+ identity_token: 0 1 2
+ model:
+   _component_: models.lora_mmllama3_8b
+   lora_attn_modules:
+   - q_proj
+   - v_proj
+   apply_lora_to_mlp: false
+   apply_lora_to_output: false
+   lora_rank: 8
+   lora_alpha: 16
+   perception_tokens: 2
+   use_clip: false
+ tokenizer:
+   _component_: models.a2a_tokenizer
+   path: checkpoints/Meta-Llama-3-8B-Instruct/original/tokenizer.model
+ checkpointer:
+   _component_: torchtune.utils.FullModelMetaCheckpointer
+   checkpoint_dir: checkpoints/Meta-Llama-3-8B-Instruct/original/
+   checkpoint_files:
+   - consolidated.00.pth
+   adapter_checkpoint: null
+   recipe_checkpoint: null
+   output_dir: output_checkpoints/experiment_4
+   model_type: LLAMA3
+ resume_from_checkpoint: false
+ interim_checkpoint_steps: 1500000
+ interim_gen_steps: null
+ max_new_tokens: 100
+ temperature: 0.6
+ top_k: 300
+ dataset:
+   _component_: ds.EvenBatcher
+   dataset:
+     _component_: ds.RoundRobinDataset
+     datasets:
+     - _component_: ds.IdentityDataset
+       identity: ${identity_token}
+       length: 250000
+       train_on_input: true
+ seed: null
+ shuffle: true
+ batch_size: 4
+ optimizer:
+   _component_: torch.optim.AdamW
+   weight_decay: 0.01
+   lr: 0.0003
+ lr_scheduler:
+   _component_: torchtune.modules.get_cosine_schedule_with_warmup
+   num_warmup_steps: 100
+ loss:
+   _component_: torch.nn.CrossEntropyLoss
+ epochs: 1
+ max_steps_per_epoch: null
+ gradient_accumulation_steps: 64
+ compile: false
+ output_dir: /tmp/lora_finetune_output
+ metric_logger:
+   _component_: torchtune.utils.metric_logging.DiskLogger
+   log_dir: ${output_dir}
+ log_every_n_steps: null
+ device: cuda
+ dtype: bf16
+ enable_activation_checkpointing: false
+ profiler:
+   _component_: torchtune.utils.profiler
+   enabled: false
+ inference:
+   prompt_template: 'Video:
+
+     {video}
+
+     Caption the previous video.'
+   max_new_tokens: 300
+   temperature: 0.6
+   top_k: 300
+ quantizer: null
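This config follows the usual torchtune/OmegaConf conventions: each _component_ names the Python class or function a recipe would instantiate, and ${identity_token} / ${output_dir} are interpolations resolved against other keys in the same file. The custom components (models.lora_mmllama3_8b, models.a2a_tokenizer, ds.EvenBatcher, ds.RoundRobinDataset, ds.IdentityDataset) are project-local and not part of torchtune itself, so the sketch below only exercises the generic OmegaConf side; it is a minimal illustration under those assumptions, not the repository's own recipe code.

from omegaconf import OmegaConf

cfg = OmegaConf.load("training_config.yml")
OmegaConf.resolve(cfg)  # expand ${...} references in place

# ${identity_token} inside the IdentityDataset entry resolves to "0 1 2",
# and ${output_dir} in the metric logger resolves to /tmp/lora_finetune_output.
print(cfg.dataset.dataset.datasets[0].identity)
print(cfg.metric_logger.log_dir)

# Effective optimisation batch size implied by the config:
# batch_size (4) x gradient_accumulation_steps (64) = 256 samples per optimizer step.
print(cfg.batch_size * cfg.gradient_accumulation_steps)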