Column schema (dtype and observed value range or number of distinct values):

| Column | Dtype | Range / values |
| --- | --- | --- |
| index | int64 | 0 to 125k |
| modelId | stringlengths | 6 to 115 |
| config_model_type | stringlengths | 2 to 46 |
| config_architectures | stringlengths | 2 to 91 |
| config_vocab_size | stringlengths | 1 to 8 |
| config_torch_dtype | stringclasses | 7 values |
| config_transformers_version | stringclasses | 228 values |
| config_hidden_size | float64 | 0 to 18.4k |
| config_intermediate_size | float64 | 0 to 25.2M |
| config_num_hidden_layers | float64 | -1 to 260 |
| config_num_attention_heads | stringclasses | 47 values |
| config_num_key_value_heads | float64 | 0 to 4.1k |
| config_hidden_act | stringclasses | 19 values |
| config_attention_dropout | float64 | 0 to 0.5 |
| config_use_cache | stringclasses | 3 values |
| config_max_position_embeddings | float64 | -1 to 10.5M |
| config_rope_theta | float64 | 256 to 100B |
| config_rms_norm_eps | float64 | 0 to 0 |
| config_initializer_range | float64 | 0 to 2 |
| config_bos_token_id | stringclasses | 158 values |
| config_eos_token_id | stringclasses | 339 values |
| config_tie_word_embeddings | bool | 2 classes |
| config_head_dimension | float64 | 0.5 to 3.07k |
| config_gqa_ratio | float64 | 0.5 to 64 |
| config_moe_enabled | bool | 1 class |
| config_n_routed_experts | float64 | 1 to 384 |
| config_num_experts_per_tok | float64 | 1 to 64 |
| is_llama_family | bool | 2 classes |
| is_bert_family | bool | 2 classes |
| is_gpt_family | bool | 2 classes |
| is_t5_family | bool | 2 classes |
| is_whisper_family | bool | 2 classes |
| is_deepseek_family | bool | 2 classes |
| is_mistral_family | bool | 2 classes |
| uses_moe | bool | 2 classes |
| uses_gqa | bool | 2 classes |
| uses_rope | bool | 2 classes |
| config_approx_params_billions | float64 | -0.2 to 606 |
| size_category | stringclasses | 4 values |
| context_category | stringclasses | 4 values |
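Since the schema above is a flat per-model table of config fields, it can be explored directly with pandas. The snippet below is a minimal sketch, not part of the dataset: the file name `model_configs.parquet` and the on-disk format are assumptions, so adjust them to however the table is actually distributed.

```python
import pandas as pd

# Assumed local export of the table above (could equally be CSV or a
# datasets.load_dataset(...) call); the path is a placeholder.
df = pd.read_parquet("model_configs.parquet")

# Example query: small models that use grouped-query attention and
# advertise a very long context window.
subset = df[
    (df["size_category"] == "small")
    & (df["uses_gqa"])
    & (df["context_category"] == "very_long")
]
print(subset[["modelId", "config_model_type", "config_approx_params_billions"]].head())
```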
Rows 400 to 499:

| index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 400 | 0xtinuviel/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-amphibious_exotic_gull | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.2 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 401 | 0xtinuviel/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-deadly_yawning_emu | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.2 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 402 | 0xtinuviel/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-freckled_jumping_bobcat | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 403 | 0xtinuviel/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-rough_gliding_armadillo | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 404 | 0xtinuviel/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-wily_pale_impala | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.2 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 405 | 0xtinuviel/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-winged_shrewd_condor | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.2 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 406 | 0xtinuviel/Qwen2.5-1.5B-Instruct-Gensyn-Swarm-eager_padded_bison | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 407 | 0xyf/gpt2-windows-log-QnA-text-gen | gpt2 | ["GPT2LMHeadModel"] | 3461 | float32 | 4.46.2 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 0 | 0 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 408 | 0xyf/sysmon-QnA-gpt2-text-gen | gpt2 | ["GPT2LMHeadModel"] | 1867 | float32 | 4.44.2 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 0 | 0 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 409 | 0xzksnark/gemma-3-4b-it-cybersecurity-merged | gemma3 | ["Gemma3ForConditionalGeneration"] | null | bfloat16 | 4.51.3 | null | null | null | null | null | null | null | null | null | null | null | 0.02 | 2 | 106 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null |
| 410 | 0ybo/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 411 | 0ys/mt5-small-finetuned-amazon-en-es | mt5 | ["MT5ForConditionalGeneration"] | 250112 | float32 | 4.22.1 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 412 | 1-13-am/deberta-pii-finetuned | deberta-v2 | ["DebertaV2ForTokenClassification"] | 128100 | float32 | 4.37.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 413 | 1-13-am/distilbert-base-uncased-finetuned-emotion | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.36.2 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 414 | 1-13-am/xlm-roberta-base-finetuned-panx-de-fr | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.37.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 415 | 1-13-am/xlm-roberta-base-pii-finetuned | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.37.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 416 | 1-800-SHARED-TASKS/llama3.1-8b-ChipsalSubtaskA-16bit | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.44.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 417 | 1-lock/0328eca8-8a54-4906-8813-8cb56b4ac8c7 | llama | ["LlamaForCausalLM"] | 49152 | bfloat16 | 4.46.0 | 2,048 | 8,192 | 24 | 32 | 32 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 0 | 0 | true | 64 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 1.20796 | medium | medium |
| 418 | 1-lock/090c305e-f6b0-4f40-a8e6-396f0bf24bf3 | mistral | ["MistralForCausalLM"] | 32003 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | long |
| 419 | 1-lock/0ad7d4ad-f79e-490c-8ae1-ca2db3df3dcc | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,072 | 24,576 | 28 | 16 | 16 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 192 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.170894 | medium | long |
| 420 | 1-lock/0ffdac01-fb3b-4cff-a490-aee966862d58 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 421 | 1-lock/122d5385-0eea-49a8-9b5d-dc2cef3737c7 | llama | ["LlamaForCausalLM"] | 32256 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 16,384 | 100,000 | 0.000001 | 0.02 | 32013 | 32021 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 422 | 1-lock/17a1d6a1-1cd5-481a-9d60-cedcaaa1973c | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 423 | 1-lock/19f7f2b9-6192-4c75-8fc5-4fff7b20251e | llama | ["LlamaForCausalLM"] | 49153 | bfloat16 | 4.46.0 | 576 | 1,536 | 30 | 9 | 3 | silu | 0 | False | 8,192 | 100,000 | 0.00001 | 0.041667 | 0 | 0 | true | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.119439 | small | long |
| 424 | 1-lock/1b8ab1e0-6218-4b84-9c24-f74892620058 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 425 | 1-lock/1d04373e-3697-4945-b027-2f834ada86f6 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,024 | 2,816 | 24 | 16 | 16 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 64 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.30199 | small | very_long |
| 426 | 1-lock/1e28d0a3-fee0-4776-bae8-9806d5b0ca9b | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.46.0 | 3,200 | 8,640 | 26 | 32 | 32 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 100 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 3.19488 | medium | medium |
| 427 | 1-lock/267607c4-3f58-4ff8-ad72-625352a06ab8 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 428 | 1-lock/2891ceeb-beb7-4a0c-8669-fa856b437fdb | llama | ["LlamaForCausalLM"] | 49153 | bfloat16 | 4.46.0 | 960 | 2,560 | 32 | 15 | 5 | silu | 0 | False | 8,192 | 100,000 | 0.00001 | 0.02 | 0 | 0 | true | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.353894 | small | long |
| 429 | 1-lock/2a919bf9-8aeb-42bd-8a27-875c6696d1c4 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 430 | 1-lock/2df9b646-96c2-4838-92a4-8e8462b7a32e | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 431 | 1-lock/30147f10-8d51-4cfd-a137-1994bdd438ae | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 432 | 1-lock/3070ad16-bfaf-4809-bcfd-56bd1e38e058 | mistral | ["MistralForCausalLM"] | 32003 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | long |
| 433 | 1-lock/30a500d3-cd2b-4610-ab96-31bc80ec47cb | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 434 | 1-lock/32ec391e-4e16-44f3-832e-6e3456469e36 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 435 | 1-lock/339f21eb-7ff0-4748-ad1c-0ca01305bd67 | llama | ["LlamaForCausalLM"] | 32016 | float16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 436 | 1-lock/345d1e12-c209-4e7d-afb5-8138d8318506 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 3,072 | 8,192 | 28 | 24 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 3.170894 | medium | very_long |
| 437 | 1-lock/350909ed-66f4-4b93-9578-322a3d627b24 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 438 | 1-lock/35dead03-4931-4dae-9c16-b98c876f0b5a | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 4,096 | 10,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | medium |
| 439 | 1-lock/43a55ece-4d2a-4b3e-93a7-5a47c3c45264 | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.46.0 | 4,096 | 14,336 | 48 | 32 | 8 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 9.663676 | large | medium |
| 440 | 1-lock/44544341-e391-4abb-ae67-3d314764166a | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long |
| 441 | 1-lock/4718293d-286e-444f-a9df-7c6cc3aa5937 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.46.0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 442 | 1-lock/498b62f0-6bf1-431e-b65c-8a37347f9c24 | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,072 | 24,576 | 28 | 16 | 16 | gelu | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 192 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.170894 | medium | long |
| 443 | 1-lock/4bcc03c8-df85-4a35-aec2-a35701f2914d | phi3 | ["Phi3ForCausalLM"] | 32064 | float32 | 4.46.0 | 32 | 64 | 2 | 4 | 4 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 8 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.000025 | small | medium |
| 444 | 1-lock/4bfc3447-f9d8-4ba1-932b-c6d2484e0e24 | llama | ["LlamaForCausalLM"] | 128257 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128003 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 445 | 1-lock/4e6709b9-19a1-45f4-b76a-1fe6c1c79d57 | qwen2_moe | ["Qwen2MoeForCausalLM"] | 151936 | float32 | 4.46.0 | 32 | 22 | 4 | 4 | 2 | silu | 0 | False | 2,048 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 8 | 2 | false | null | 4 | false | false | false | false | false | false | false | false | true | true | 0.000049 | small | medium |
| 446 | 1-lock/50baae52-d2e5-4dcb-9131-85e6450f4477 | llama | ["LlamaForCausalLM"] | 49152 | bfloat16 | 4.46.0 | 960 | 2,560 | 32 | 15 | 5 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | true | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.353894 | small | medium |
| 447 | 1-lock/5737beba-a6cf-4751-a823-f1b80f1b9204 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.0 | 8 | 32 | 2 | 4 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 2 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.000002 | small | very_long |
| 448 | 1-lock/57626320-3282-484f-a3e4-9c2dd57520b6 | qwen2_moe | ["Qwen2MoeForCausalLM"] | 151936 | float32 | 4.46.0 | 32 | 22 | 4 | 4 | 2 | silu | 0 | False | 2,048 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 8 | 2 | false | null | 4 | false | false | false | false | false | false | false | false | true | true | 0.000049 | small | medium |
| 449 | 1-lock/593ff86e-d5f0-40b5-96ea-1cbe764a6df4 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 450 | 1-lock/59b01df7-a80d-4f6c-8d3a-af766145fb91 | llama | ["LlamaForCausalLM"] | 32016 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 451 | 1-lock/5ce582a9-3802-42b9-a0e7-4f4150eb979f | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long |
| 452 | 1-lock/5cfacbdc-e852-40fb-a08e-e77f88e3e83b | llama | ["LlamaForCausalLM"] | 128257 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128003 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 453 | 1-lock/5df4662f-50a8-4645-9fe9-6682aa695712 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 4,096 | 10,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | medium |
| 454 | 1-lock/5f366943-d239-46f4-b30e-c619898dfc7c | llama | ["LlamaForCausalLM"] | 32016 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 455 | 1-lock/5f923b8d-820c-4502-aa5a-ceb34a24935f | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 456 | 1-lock/5fa37ae5-8e6d-4d21-a714-20c1d974027b | falcon | ["FalconForCausalLM"] | 65024 | float32 | 4.46.0 | 128 | null | 2 | 16 | null | null | 0 | False | 2,048 | 10,000 | null | 0.02 | null | 11 | null | 8 | null | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.000393 | small | medium |
| 457 | 1-lock/6109bbeb-f9fb-474d-9158-e6c1172a6fd4 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 458 | 1-lock/622425bc-ad22-4c33-a6a2-b8c74c4985ec | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 459 | 1-lock/64a591c1-0af9-4292-9e18-4a52755c506b | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 460 | 1-lock/696ac27f-be3e-49c0-a0ca-4bed0a05c306 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.0 | 8 | 32 | 2 | 4 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 2 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.000002 | small | very_long |
| 461 | 1-lock/6a2a58b9-c8d1-49df-8b64-a5fdfe1a34a4 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long |
| 462 | 1-lock/6febe1b0-ba43-4bb7-90c8-4074091a5ea2 | phi | ["PhiForCausalLM"] | 1025 | float32 | 4.46.0 | 32 | 37 | 2 | 4 | 4 | gelu | 0 | False | 2,048 | 10,000 | null | 0.02 | 0 | 0 | false | 8 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.000025 | small | medium |
| 463 | 1-lock/7fa7519a-dce7-4294-aef9-4608ed122d8d | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 464 | 1-lock/8017b487-5c02-4f40-a0b2-7e3ef8e21eb3 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 465 | 1-lock/803afd8b-310f-4251-b5d4-b8902e7d5d0e | llama | ["LlamaForCausalLM"] | 32016 | float16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 466 | 1-lock/819315ce-4b97-4266-8a64-a89ab7b59896 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,024 | 2,816 | 24 | 16 | 16 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 64 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.30199 | small | very_long |
| 467 | 1-lock/832f0834-9b10-4688-bd9e-38a69c5d39a0 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 468 | 1-lock/83d67e69-f2f8-4525-aa10-dc3c28b1fc5e | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.46.0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 469 | 1-lock/85cf41cb-2f92-46e4-a695-31d1b95c9127 | llama | ["LlamaForCausalLM"] | 49153 | bfloat16 | 4.46.0 | 960 | 2,560 | 32 | 15 | 5 | silu | 0 | False | 8,192 | 100,000 | 0.00001 | 0.02 | 0 | 0 | true | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.353894 | small | long |
| 470 | 1-lock/8b31fb2b-8aa0-46dd-a1d7-4bb72ccbd49e | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 471 | 1-lock/8c3d3379-37c7-4f91-aa8f-205443de47d1 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 472 | 1-lock/8d04d9ea-e0f3-4c6e-961f-a0a74f997d39 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 473 | 1-lock/8e20aad4-3010-401a-8e2e-b0bdfdcfd3e0 | llama | ["LlamaForCausalLM"] | 32016 | float16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 474 | 1-lock/8ff46554-84f4-4b4c-869a-c054fd78523c | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 4,096 | 10,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | medium |
| 475 | 1-lock/91232878-9d55-47b2-a7d6-901b52faf3c3 | llama | ["LlamaForCausalLM"] | 32016 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 476 | 1-lock/9d14bd20-fd9f-4e8e-b29e-c502d03c3a8f | llama | ["LlamaForCausalLM"] | 50964 | float32 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 477 | 1-lock/abe55978-66c5-46cb-a2cc-1b348b9a4608 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 478 | 1-lock/af12f959-2447-4124-ba37-e9e3e6beee86 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 4,096 | 10,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | medium |
| 479 | 1-lock/b15010a7-2af6-415e-afbb-1d484f234108 | mistral | ["MistralForCausalLM"] | 32002 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | long |
| 480 | 1-lock/b1c97d78-e953-49ed-859d-0c2603e7f887 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 4,096 | 10,000 | 0.000001 | 0.02 | null | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | medium |
| 481 | 1-lock/b24ba81f-7ef4-4f85-aa06-3581ac9f0c76 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long |
| 482 | 1-lock/b2bdce73-b029-4c0d-9cab-4425ac192934 | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,072 | 24,576 | 28 | 16 | 16 | gelu | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 192 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.170894 | medium | long |
| 483 | 1-lock/b53b32a3-20d6-4f6d-bf5d-f4833d13909e | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 2,048 | 11,008 | 36 | 16 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 128 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.811939 | medium | very_long |
| 484 | 1-lock/b7854b60-d08f-4bc6-91ef-cbc321950656 | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,072 | 24,576 | 28 | 16 | 16 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 192 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.170894 | medium | long |
| 485 | 1-lock/b85a4e3d-93c8-46f3-92a5-8e718064e026 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 486 | 1-lock/b85e7eb7-ae1f-4f5c-9aaa-41434520ef05 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 487 | 1-lock/b8d95989-54fa-4b5c-a7d7-31724cf3ca3a | phi3 | ["Phi3ForCausalLM"] | 32064 | float32 | 4.46.0 | 32 | 64 | 2 | 4 | 4 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 8 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.000025 | small | medium |
| 488 | 1-lock/ba808ad5-6897-453a-96e8-65d7ed43d9f8 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 489 | 1-lock/ba8ae684-f491-4f6a-832a-cb3830f68587 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 490 | 1-lock/be912e6b-b041-4d11-b547-b6ffc6b67ee9 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 491 | 1-lock/bf47f1aa-5ee9-47e2-a417-a945a208e52d | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 492 | 1-lock/c5351754-3ca4-40ec-b6a1-8a4be680a0ae | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 493 | 1-lock/c5aa696e-b261-4d7b-80b4-292d1a774c12 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 494 | 1-lock/c70a8a3b-2b79-41b4-9a65-1dc23cac10a8 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 495 | 1-lock/c7fd6fbc-21ee-427d-9e62-0bbc3fac5a22 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 496 | 1-lock/cba88447-0d96-4d0c-9d0f-7e97dc847b67 | phi3 | ["Phi3ForCausalLM"] | 32064 | bfloat16 | 4.46.0 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | False | 131,072 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.623879 | medium | very_long |
| 497 | 1-lock/cbb1f466-752c-485e-a59f-e9572b87753a | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 498 | 1-lock/d4e9b050-e8f3-4296-87c5-18730bfd1b2f | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 499 | 1-lock/dcad3d02-89dc-4c27-b8a5-eb4a96e70e57 | falcon | ["FalconForCausalLM"] | 65024 | float32 | 4.46.0 | 32 | null | 2 | 2 | null | null | 0 | False | 2,048 | 10,000 | null | 0.02 | null | 11 | null | 16 | null | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.000025 | small | medium |
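The derived columns are arithmetic functions of the raw config fields; in row 400, for instance, the head dimension 64 is 896 / 14 and the GQA ratio 7 is 14 / 2. The sketch below shows how fields like these could be recomputed; the exact formulas used to build the dataset (in particular config_approx_params_billions and the size/context category cut-offs) are not documented here, so this is an assumed reconstruction rather than the published pipeline.

```python
def derive_fields(cfg: dict) -> dict:
    """Recompute a few derived columns from raw config values (assumed formulas)."""
    hidden = cfg["config_hidden_size"]
    heads = cfg["config_num_attention_heads"]
    kv_heads = cfg["config_num_key_value_heads"]
    return {
        # head_dimension = hidden_size / num_attention_heads
        "config_head_dimension": hidden / heads if heads else None,
        # gqa_ratio = attention heads per key/value head; > 1 implies grouped-query attention
        "config_gqa_ratio": heads / kv_heads if kv_heads else None,
        "uses_gqa": bool(kv_heads) and heads > kv_heads,
    }

# Row 400 above: hidden_size=896, heads=14, kv_heads=2
# -> head_dimension 64.0, gqa_ratio 7.0, uses_gqa True, matching the table.
print(derive_fields({
    "config_hidden_size": 896,
    "config_num_attention_heads": 14,
    "config_num_key_value_heads": 2,
}))
```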