dacorvo (HF Staff) committed on
Commit 55292d6 · verified · 1 Parent(s): d13c96c

Synchronizing local compiler cache.

.gitattributes CHANGED
@@ -12385,3 +12385,6 @@ neuronxcc-2.21.18209.0+043b1bf7/MODULE_b21a26069f61a1dc2882+24129607/model.neff
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_1b7bd394fcdc871ad94d+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_1b7bd394fcdc871ad94d+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_77179bb66fd25af70c12+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_1243a190b2610c59fb46+a32116a7/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_c312ac2d86965b981f71+ac10809c/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_c312ac2d86965b981f71+ac10809c/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.1.dev0/llama4_text/meta-llama/Llama-4-Maverick-17B-128E-Instruct/a0b129204c79383ab844.json ADDED
@@ -0,0 +1,189 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
+ "_task": "text-generation",
+ "attention_bias": false,
+ "attention_chunk_size": 8192,
+ "attention_dropout": 0.0,
+ "attn_scale": 0.1,
+ "attn_temperature_tuning": true,
+ "floor_scale": 8192,
+ "for_llm_compressor": false,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "interleave_moe_layer_step": 2,
+ "intermediate_size": 8192,
+ "intermediate_size_mlp": 16384,
+ "layer_types": [
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 1048576,
+ "model_type": "llama4_text",
+ "moe_layers": [
+ 1,
+ 3,
+ 5,
+ 7,
+ 9,
+ 11,
+ 13,
+ 15,
+ 17,
+ 19,
+ 21,
+ 23,
+ 25,
+ 27,
+ 29,
+ 31,
+ 33,
+ 35,
+ 37,
+ 39,
+ 41,
+ 43,
+ 45,
+ 47
+ ],
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
+ "checkpoint_revision": "73d14711bcc77c16df3470856949c3764056b617",
+ "continuous_batching": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 64,
+ "max_batch_size": 1,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.1.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn2",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 64
+ },
+ "no_rope_layers": [
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0
+ ],
+ "num_attention_heads": 40,
+ "num_experts_per_tok": 1,
+ "num_hidden_layers": 48,
+ "num_key_value_heads": 8,
+ "num_local_experts": 128,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 500000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_qk_norm": false,
+ "vocab_size": 202048
+ }
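
(For reference: the registry entry above is plain JSON, so the compilation parameters that gate cache reuse can be inspected without any Neuron tooling. A minimal Python sketch, assuming a local checkout of this cache repo; treating the "neuron" block as the set of keys that must match for a cache hit is an assumption about how the optimum-neuron lookup works, not documented behaviour.)

import json
from pathlib import Path

# Registry entry added by this commit (path relative to the cache repo root).
entry_path = Path(
    "neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.1.dev0/llama4_text/"
    "meta-llama/Llama-4-Maverick-17B-128E-Instruct/a0b129204c79383ab844.json"
)
entry = json.loads(entry_path.read_text())

# The "neuron" sub-dict carries the compilation settings; the remaining keys
# mirror the model's config.json (assumption: both must match for a cache hit).
neuron = entry["neuron"]
print(f"model:       {entry['_model_id']} ({entry['_task']})")
print(f"compiler:    neuronxcc {neuron['neuronxcc_version']}, target {neuron['target']}")
print(f"parallelism: tp={neuron['tp_degree']} pp={neuron['pp_degree']} ep={neuron['ep_degree']}")
print(f"shapes:      batch_size={neuron['batch_size']} sequence_length={neuron['sequence_length']}")
print(f"dtype:       {neuron['torch_dtype']}")
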
neuronxcc-2.21.18209.0+043b1bf7/MODULE_1243a190b2610c59fb46+a32116a7/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/context_encoding/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_1243a190b2610c59fb46+a32116a7/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_1243a190b2610c59fb46+a32116a7/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e078796e03fab813085d4281c67b11bcf8f50123dd1c9d7daf14b8594d58a34c
+ size 107209277
neuronxcc-2.21.18209.0+043b1bf7/MODULE_1243a190b2610c59fb46+a32116a7/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a59c4f8cc62b8e5fb734f39f60da0ec0dabfcf3dd9d26cb1e97a481dbc06889
+ size 39906304
neuronxcc-2.21.18209.0+043b1bf7/MODULE_c312ac2d86965b981f71+ac10809c/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_c312ac2d86965b981f71+ac10809c/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_c312ac2d86965b981f71+ac10809c/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80024ead563fcdcf065aefa0335eea0860603763b9c46e1be33990d04ea33425
+ size 103022001
neuronxcc-2.21.18209.0+043b1bf7/MODULE_c312ac2d86965b981f71+ac10809c/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:907a586dd5a5f86149f6e1a7c8db33889a49602264815c3c49831776ab8dde74
+ size 7005184
neuronxcc-2.21.18209.0+043b1bf7/MODULE_c312ac2d86965b981f71+ac10809c/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b2f33450c60fe268482edaa871a7e03430b06d567f814ff81ae7f6ce6919e028
+ size 7314549
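
(The model.neff, model.hlo_module.pb and wrapped_neff.hlo entries above are git-lfs pointers: git stores only the three-line stanza of spec version, sha256 oid and byte size, while the actual blobs live in LFS storage. A minimal Python sketch for checking a fetched artifact against the oid and size recorded in this commit; the local path assumes the blob was materialized with `git lfs pull`.)

import hashlib
from pathlib import Path

def verify_lfs_blob(pointer_text: str, blob_path: str) -> bool:
    """Check a fetched blob against the sha256 oid and size in its LFS pointer."""
    fields = dict(line.split(" ", 1) for line in pointer_text.strip().splitlines())
    expected_oid = fields["oid"].removeprefix("sha256:")
    blob = Path(blob_path).read_bytes()
    return hashlib.sha256(blob).hexdigest() == expected_oid and len(blob) == int(fields["size"])

# Pointer content as stored in git for the token-generation NEFF (copied from the diff above).
pointer = """\
version https://git-lfs.github.com/spec/v1
oid sha256:907a586dd5a5f86149f6e1a7c8db33889a49602264815c3c49831776ab8dde74
size 7005184
"""
print(verify_lfs_blob(
    pointer,
    "neuronxcc-2.21.18209.0+043b1bf7/MODULE_c312ac2d86965b981f71+ac10809c/model.neff",
))
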