andito (HF Staff) committed
Commit 685879b · verified · 1 parent: b6eacab

Upload nanoVLM using push_to_hub

Files changed (2):
  config.json        +20 -3
  model.safetensors  +2 -2
config.json CHANGED
@@ -15,8 +15,8 @@
   "lm_re_base": 100000,
   "lm_max_position_embeddings": 8192,
   "lm_base_vocab_size": 49152,
-  "extra_token_amount": 1,
-  "lm_vocab_size": 49153,
+  "extra_token_amount": 17,
+  "lm_vocab_size": 49169,
   "lm_n_heads": 15,
   "lm_n_kv_heads": 5,
   "lm_dropout": 0.0,
@@ -30,8 +30,25 @@
   "lm_chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "mp_pixel_shuffle_factor": 4,
   "mp_image_token_length": 64,
+  "max_img_size": 512,
   "vlm_extra_tokens": {
-    "image_token": "<|image|>"
+    "image_token": "<|image|>",
+    "r1c1": "<row_1_col_1>",
+    "r1c2": "<row_1_col_2>",
+    "r1c3": "<row_1_col_3>",
+    "r1c4": "<row_1_col_4>",
+    "r2c1": "<row_2_col_1>",
+    "r2c2": "<row_2_col_2>",
+    "r2c3": "<row_2_col_3>",
+    "r2c4": "<row_2_col_4>",
+    "r3c1": "<row_3_col_1>",
+    "r3c2": "<row_3_col_2>",
+    "r3c3": "<row_3_col_3>",
+    "r3c4": "<row_3_col_4>",
+    "r4c1": "<row_4_col_1>",
+    "r4c2": "<row_4_col_2>",
+    "r4c3": "<row_4_col_3>",
+    "r4c4": "<row_4_col_4>"
   },
   "vlm_load_backbone_weights": true,
   "vlm_checkpoint_path": "checkpoints",
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:181966c24cde0c20903be111156cf3f68b316f2f33dde3f5e030f71fd3b95d94
- size 1840254904
+ oid sha256:dcb7199d21d41d1ada921a9932d7af7c6ee65772750992a26f3ddd47556bcf9a
+ size 1840316344
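
The safetensors pointer changes because the checkpoint was re-uploaded with a larger vocabulary. A rough consistency check (the weight layout is an assumption, not stated in the commit): the file grows by 1,840,316,344 - 1,840,254,904 = 61,440 bytes, i.e. 3,840 bytes per added token, which would match e.g. a 960-dimensional embedding row in fp32, or that row stored twice (input embedding plus an untied LM head) in bf16.

# Back-of-the-envelope check on the checkpoint growth; assumes the only change
# is the 16 extra token rows in the embedding / LM-head weights.
old_size, new_size = 1_840_254_904, 1_840_316_344
delta = new_size - old_size        # 61_440 bytes
per_token = delta // 16            # 3_840 bytes per added token
print(delta, per_token)
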