yilunhao committed · Commit 30f05a5 · verified · 1 Parent(s): 2231f1f

Training in progress, step 100

adapter_config.json ADDED
@@ -0,0 +1,52 @@
+ {
+   "alora_invocation_tokens": [
+     36769,
+     22703,
+     2277,
+     259,
+     60
+   ],
+   "alpha_pattern": {},
+   "arrow_config": null,
+   "auto_mapping": null,
+   "base_model_name_or_path": "openai/gpt-oss-20b",
+   "bias": "none",
+   "corda_config": null,
+   "ensure_weight_tying": false,
+   "eva_config": null,
+   "exclude_modules": null,
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 32,
+   "lora_bias": false,
+   "lora_dropout": 0.05,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "peft_version": "0.18.0.rc0",
+   "qalora_group_size": 16,
+   "r": 32,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "k_proj",
+     "down_proj",
+     "v_proj",
+     "o_proj",
+     "q_proj",
+     "up_proj",
+     "gate_proj"
+   ],
+   "target_parameters": null,
+   "task_type": "CAUSAL_LM",
+   "trainable_token_indices": null,
+   "use_dora": false,
+   "use_qalora": false,
+   "use_rslora": false
+ }
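
The adapter_config.json above is a standard peft LoRA configuration. As a rough sketch (not taken from this repo's training code), the equivalent LoraConfig could be constructed as below; note that alora_invocation_tokens assumes a peft build with aLoRA support, consistent with the peft_version 0.18.0.rc0 recorded in the config.

```python
# Sketch: rebuild the LoraConfig recorded in adapter_config.json.
# Assumes peft >= 0.18 (the config lists peft_version 0.18.0.rc0);
# alora_invocation_tokens requires a peft build with aLoRA support.
from peft import LoraConfig

lora_config = LoraConfig(
    r=32,                      # LoRA rank
    lora_alpha=32,             # scaling factor: alpha / r = 1.0
    lora_dropout=0.05,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[           # all attention + MLP projections
        "k_proj", "down_proj", "v_proj", "o_proj",
        "q_proj", "up_proj", "gate_proj",
    ],
    alora_invocation_tokens=[36769, 22703, 2277, 259, 60],
)
```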
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bdfd1e26996d8932d6246c2c788df5af0ca938b147b8d4a1114f9079f00c5d1
+ size 63726760
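
This file is stored as a Git LFS pointer (as is training_args.bin below): the oid is the sha256 of the actual 63,726,760-byte payload. A minimal check that a downloaded file matches its pointer:

```python
# Sketch: verify a downloaded LFS object against the pointer's sha256 oid.
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "5bdfd1e26996d8932d6246c2c788df5af0ca938b147b8d4a1114f9079f00c5d1"
assert sha256_of("adapter_model.safetensors") == expected
```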
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b45472a73137b6a4db6c16260d331c7c48d523ba179d6308a3202b936a2373d5
+ size 5841
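
To use this step-100 checkpoint, the adapter can be attached to the base model named in the config. A hedged sketch of standard peft usage; the repo id is a placeholder, as this commit does not show it:

```python
# Sketch: load the adapter on top of its base model.
# "user/repo" is a hypothetical placeholder; substitute the actual Hub repo id.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "openai/gpt-oss-20b",      # base_model_name_or_path from adapter_config.json
    torch_dtype="auto",
)
model = PeftModel.from_pretrained(base, "user/repo")
```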