Column schema:

| Column | Dtype | Range / distinct values |
| --- | --- | --- |
| index | int64 | 0 to 125k |
| modelId | string | lengths 6 to 115 |
| config_model_type | string | lengths 2 to 46 |
| config_architectures | string | lengths 2 to 91 |
| config_vocab_size | string | lengths 1 to 8 |
| config_torch_dtype | string | 7 values |
| config_transformers_version | string | 228 values |
| config_hidden_size | float64 | 0 to 18.4k |
| config_intermediate_size | float64 | 0 to 25.2M |
| config_num_hidden_layers | float64 | -1 to 260 |
| config_num_attention_heads | string | 47 values |
| config_num_key_value_heads | float64 | 0 to 4.1k |
| config_hidden_act | string | 19 values |
| config_attention_dropout | float64 | 0 to 0.5 |
| config_use_cache | string | 3 values |
| config_max_position_embeddings | float64 | -1 to 10.5M |
| config_rope_theta | float64 | 256 to 100B |
| config_rms_norm_eps | float64 | 0 to 0 |
| config_initializer_range | float64 | 0 to 2 |
| config_bos_token_id | string | 158 values |
| config_eos_token_id | string | 339 values |
| config_tie_word_embeddings | bool | 2 classes |
| config_head_dimension | float64 | 0.5 to 3.07k |
| config_gqa_ratio | float64 | 0.5 to 64 |
| config_moe_enabled | bool | 1 class |
| config_n_routed_experts | float64 | 1 to 384 |
| config_num_experts_per_tok | float64 | 1 to 64 |
| is_llama_family | bool | 2 classes |
| is_bert_family | bool | 2 classes |
| is_gpt_family | bool | 2 classes |
| is_t5_family | bool | 2 classes |
| is_whisper_family | bool | 2 classes |
| is_deepseek_family | bool | 2 classes |
| is_mistral_family | bool | 2 classes |
| uses_moe | bool | 2 classes |
| uses_gqa | bool | 2 classes |
| uses_rope | bool | 2 classes |
| config_approx_params_billions | float64 | -0.2 to 606 |
| size_category | string | 4 values |
| context_category | string | 4 values |
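Several columns are derived from the raw `config.json` fields rather than read straight out of them (config_head_dimension, config_gqa_ratio, config_approx_params_billions, the uses_* flags, and the size/context buckets). The exact rules are not documented in this preview, but the rows below are consistent with the simple derivations sketched here: for row 300, 3,584 / 28 = 128 (head dimension), 28 / 4 = 7 (GQA ratio), and 12 · 28 · 3,584² ≈ 4.3159B (approximate parameters). The function below is therefore an illustrative assumption inferred from the preview, not the dataset's actual pipeline.

```python
# Minimal sketch (not the dataset's documented pipeline): derive the
# config_head_dimension, config_gqa_ratio, uses_gqa and
# config_approx_params_billions columns from a raw HF config dict.
# The formulas are assumptions inferred from the preview rows.

def derive_fields(cfg: dict) -> dict:
    hidden = cfg.get("hidden_size")
    layers = cfg.get("num_hidden_layers")
    heads = cfg.get("num_attention_heads")
    kv_heads = cfg.get("num_key_value_heads")

    out = {}
    # Head dimension: hidden size split evenly across attention heads
    # (matches the preview, e.g. 3584 / 28 = 128).
    out["config_head_dimension"] = hidden / heads if hidden and heads else None

    # GQA ratio: query heads per key/value head (28 / 4 = 7 in row 300);
    # treating "fewer KV heads than query heads" as uses_gqa is an assumption.
    out["config_gqa_ratio"] = heads / kv_heads if heads and kv_heads else None
    out["uses_gqa"] = bool(heads and kv_heads and kv_heads < heads)

    # Rough parameter count: the common 12 * L * d^2 transformer approximation,
    # which reproduces e.g. 12 * 28 * 3584^2 ≈ 4.3159e9 for preview row 300.
    if hidden and layers:
        out["config_approx_params_billions"] = 12 * layers * hidden**2 / 1e9
    else:
        out["config_approx_params_billions"] = None
    return out


if __name__ == "__main__":
    qwen2_7b = {  # values copied from preview row 300
        "hidden_size": 3584,
        "num_hidden_layers": 28,
        "num_attention_heads": 28,
        "num_key_value_heads": 4,
    }
    print(derive_fields(qwen2_7b))
    # {'config_head_dimension': 128.0, 'config_gqa_ratio': 7.0,
    #  'uses_gqa': True, 'config_approx_params_billions': 4.315938816}
```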
Preview rows 300 to 399:

| index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 300 | 0x1202/dcc72519-63d4-4ed0-84f1-e321b51effa3 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 301 | 0x1202/e08f7b98-68e5-4e3e-8bab-adb6d18d95a1 | mistral | ["MistralForCausalLM"] | 32000 | float32 | 4.46.0 | 32 | 37 | 2 | 4 | 2 | gelu | 0 | False | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 8 | 2 | false | null | null | false | false | false | false | false | false | true | false | true | true | 0.000025 | small | short |
| 302 | 0x1202/e0fa7c5d-2783-4018-8d5c-34f40936f7fa | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.46.0 | 3,200 | 8,640 | 26 | 32 | 32 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 100 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 3.19488 | medium | medium |
| 303 | 0x1202/e68227f7-d91e-4e85-b659-16acc2acee6f | phi | ["PhiForCausalLM"] | 1025 | float32 | 4.46.0 | 32 | 37 | 2 | 4 | 4 | gelu | 0 | False | 1,024 | 10,000 | null | 0.02 | 0 | 0 | false | 8 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.000025 | small | short |
| 304 | 0x1202/e6985bd7-688e-4025-95b3-ed030fdf8d56 | gemma | ["GemmaForCausalLM"] | 256000 | float32 | 4.46.0 | 32 | 2 | 1 | 2 | 1 | gelu | 0 | False | 1,024 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 16 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.000012 | small | short |
| 305 | 0x1202/e6b10c28-9bbb-459e-9eef-2ef21bf0637b | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.46.0 | 16 | 64 | 2 | 4 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 0 | 2 | false | 4 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 0.000006 | small | medium |
| 306 | 0x1202/e8919087-faab-443a-ac95-d4d7aaf204a0 | llama | ["LlamaForCausalLM"] | 32016 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 307 | 0x1202/ecb8379e-81b0-4bd5-a690-c05a5108b46a | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128040 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 308 | 0x1202/eeb10e32-1fe8-4932-aa54-aec02643b3ef | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 309 | 0x1202/f395675f-57f5-44b9-a122-fabd2b49aae5 | gpt_neox | ["GPTNeoXForCausalLM"] | 50304 | float16 | 4.46.0 | 512 | 2,048 | 6 | 8 | null | gelu | 0 | False | 2,048 | 10,000 | null | 0.02 | 0 | 0 | false | 64 | null | false | null | null | false | false | true | false | false | false | false | false | false | true | 0.018874 | small | medium |
| 310 | 0x1202/f4c2baed-4728-4173-8c69-e6a7b38308b8 | mistral | ["MistralForCausalLM"] | 32032 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 311 | 0x1202/f5b8abfb-7cc8-4e0a-bab4-432c3caa69a3 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 312 | 0x1202/f5fc48cf-7f5b-4806-b757-9cb18e0a9c32 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128003 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 313 | 0x1202/f6858d68-7807-4574-960c-2bab0c778393 | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 314 | 0x1202/faca9496-9018-45a1-8ff1-0d4416fc348f | qwen2_moe | ["Qwen2MoeForCausalLM"] | 151936 | float32 | 4.46.0 | 32 | 22 | 4 | 4 | 2 | silu | 0 | False | 1,024 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 8 | 2 | false | null | 4 | false | false | false | false | false | false | false | false | true | true | 0.000049 | small | short |
| 315 | 0x1202/fe41553d-e7c6-428c-bb20-31dd9b571928 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 316 | 0x1202/ff3a4e72-bd99-4500-8a9d-85aaab7ef762 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 317 | 0x434D/TIR_ControlNet | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 318 | 0x7o/fialka-7B-v2 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.37.0.dev0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 319 | 0x7o/fialka-7B-v2.1 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.37.0.dev0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 320 | 0x7o/fialka-7B-v3 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.37.0.dev0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 321 | 0x88844451/0fc5c583-fb32-44e0-8747-c8bcceaa606b | mistral | ["MistralForCausalLM"] | 32000 | float32 | 4.46.0 | 32 | 37 | 2 | 4 | 2 | gelu | 0 | False | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 8 | 2 | false | null | null | false | false | false | false | false | false | true | false | true | true | 0.000025 | small | short |
| 322 | 0x88844451/1e6e0eb1-667e-4a7f-8af7-34ef79e9d0a4 | llama | ["LlamaForCausalLM"] | 49153 | bfloat16 | 4.46.0 | 2,048 | 8,192 | 24 | 32 | 32 | silu | 0 | False | 8,192 | 130,000 | 0.00001 | 0.02 | 1 | 2 | true | 64 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 1.20796 | medium | long |
| 323 | 0x88844451/32790f74-883a-4405-ae47-42955ea80053 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 324 | 0x88844451/36e6e6b3-7938-4f27-a9d8-a14f9ced33f2 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 325 | 0x88844451/37a97388-02e6-4982-b45e-489de2c0b7f9 | phi | ["PhiForCausalLM"] | 51200 | float16 | 4.46.0 | 2,048 | 8,192 | 24 | 32 | 32 | gelu_new | 0 | False | 2,048 | 10,000 | null | 0.02 | null | null | false | 64 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 1.20796 | medium | medium |
| 326 | 0x88844451/3b2f2821-fe03-4a6c-9b07-2d9403b6f27a | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 327 | 0x88844451/441a5bda-bcfd-4ed2-a2db-0de56f854691 | llama | ["LlamaForCausalLM"] | 49152 | bfloat16 | 4.46.0 | 960 | 2,560 | 32 | 15 | 5 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | true | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.353894 | small | medium |
| 328 | 0x88844451/4717e883-2d47-4123-8493-4f7ae59ae91e | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 329 | 0x88844451/56bf523e-b592-4448-9a49-4ab1ea81e3fe | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 330 | 0x88844451/784f2a01-3bc4-449e-893b-a1f6192a4287 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long |
| 331 | 0x88844451/7bc74d33-ac4f-49a0-af6e-29bfa1e711ff | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 3,072 | 8,192 | 28 | 24 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 3.170894 | medium | very_long |
| 332 | 0x88844451/85ad06e8-f29b-4898-8fce-6c37f2a7a1fe | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 333 | 0x88844451/8aeb70b2-6db4-4218-aab5-9f61fc013d69 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 334 | 0x88844451/9479818c-003f-4ea9-bb89-35113c0acddb | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 335 | 0x88844451/a480c30c-f6b6-4e85-bcab-8bcf895e3660 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 336 | 0x88844451/b41ca61e-b0ce-459e-ac0c-5816d174a920 | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,072 | 24,576 | 28 | 16 | 16 | gelu | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 192 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.170894 | medium | long |
| 337 | 0x88844451/bc08a0d6-df80-44e7-a3c2-6bcc53df197d | bloom | ["BloomForCausalLM"] | 250880 | null | 4.46.0 | 1,024 | null | null | null | null | null | 0 | False | null | null | null | 0.02 | 1 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 338 | 0x88844451/c9d0b71c-a88a-47a2-801a-5efcbce713a0 | falcon | ["FalconForCausalLM"] | 65024 | float32 | 4.46.0 | 32 | null | 2 | 2 | null | null | 0 | False | 2,048 | 10,000 | null | 0.02 | null | 11 | null | 16 | null | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.000025 | small | medium |
| 339 | 0x88844451/d824492e-7b53-4d1a-a14b-aeb84d729360 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 48 | 32 | 8 | silu | 0 | False | 65,536 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 9.663676 | large | very_long |
| 340 | 0x88844451/e43983fc-48db-4d1a-9a13-16d3219d6210 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long |
| 341 | 0x88844451/e768060a-76f9-4016-be7c-ab98c506ff35 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 342 | 0x88844451/eab498aa-ba83-4480-8e2a-9f60e4c2a3cf | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 343 | 0x88844451/eaf22e19-3a05-437e-b962-a5717dd59362 | llama | ["LlamaForCausalLM"] | 32002 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 48 | 32 | 8 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 9.663676 | large | medium |
| 344 | 0x88844451/f2c436b2-21eb-4505-8944-b346b79ff973 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 345 | 0x88844451/f98b264e-84bb-4d04-8701-2cfffb87ce36 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 48 | 32 | 8 | silu | 0 | False | 65,536 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 9.663676 | large | very_long |
| 346 | 0x9/nous-2b-01-zephyr | stablelm | ["StableLmForCausalLM"] | 100352 | bfloat16 | 4.38.1 | 2,048 | 5,632 | 24 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | null | 0.02 | 100257 | 100257 | false | 64 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 1.20796 | medium | medium |
| 347 | 0xBreath/Meta-Llama-3.1-8B-Instruct-abliterated-q8-mlx | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.42.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 348 | 0xC4LL3/A2C-PandaPickAndPlace-v3 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null |
| 349 | 0xC4LL3/A2C-PandaReachDense-v3 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null |
| 350 | 0xC4LL3/POCA_SoccerTwos | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 351 | 0xC4LL3/PPO-Pyramids | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 352 | 0xC4LL3/PPO-SnowballTarget | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 353 | 0xC4LL3/RL-Course_ViZDoom_Health-Gathering-Supreme | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 354 | 0xEloco/dummy-model | camembert | ["CamembertForMaskedLM"] | 32005 | float32 | 4.23.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 5 | 6 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 355 | 0xFE00/ppo-Huggy | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 356 | 0xFE00/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 357 | 0xOracle/neural-chat-fraud-detection-v1 | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.34.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 358 | 0xRafu/fine_tune_lab2 | phi3 | ["Phi3ForCausalLM"] | 32064 | float16 | 4.46.2 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | True | 131,072 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.623879 | medium | very_long |
| 359 | 0xSH1V4M/distilroberta-base-sentence-transformer-snli | bert | ["BertModel"] | 30522 | float32 | 4.40.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 360 | 0xSH1V4M/distilroberta-base-sentence-transformer-triplets | bert | ["BertModel"] | 30522 | float32 | 4.40.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 361 | 0xShirin/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-whiskered_quick_otter | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 362 | 0xasi/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-galloping_amphibious_termite | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.50.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 363 | 0xaud/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-wild_dense_caribou | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 364 | 0xb1/distilbert-base-uncased-finetuned-cola | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.27.4 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 365 | 0xb1/wav2vec2-base-finetuned-ks | wav2vec2 | ["Wav2Vec2ForSequenceClassification"] | 32 | float32 | 4.27.4 | 768 | 3,072 | 12 | 12 | null | gelu | 0.1 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 366 | 0xb1/wav2vec2-base-finetuned-ks-finetuned-ks | wav2vec2 | ["Wav2Vec2ForSequenceClassification"] | 32 | float32 | 4.27.4 | 768 | 3,072 | 12 | 12 | null | gelu | 0.1 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 367 | 0xdemon/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-barky_monstrous_mouse | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 368 | 0xfader/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-energetic_majestic_impala | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.50.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 369 | 0xfader/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-pawing_meek_anteater | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 370 | 0xfaskety/Qwen-Qwen1.5-0.5B-1716737399 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 1,024 | 2,816 | 24 | 16 | 16 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.30199 | small | very_long |
| 371 | 0xfaskety/Qwen-Qwen1.5-0.5B-1716774259 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 1,024 | 2,816 | 24 | 16 | 16 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.30199 | small | very_long |
| 372 | 0xfaskety/Qwen-Qwen1.5-0.5B-1716791143 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 1,024 | 2,816 | 24 | 16 | 16 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.30199 | small | very_long |
| 373 | 0xfaskety/Qwen-Qwen1.5-1.8B-1716765290 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 2,048 | 5,504 | 24 | 16 | 16 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 1.20796 | medium | very_long |
| 374 | 0xfaskety/Qwen-Qwen1.5-1.8B-1716777053 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 2,048 | 5,504 | 24 | 16 | 16 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 1.20796 | medium | very_long |
| 375 | 0xfaskety/Qwen-Qwen1.5-1.8B-1716794004 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 2,048 | 5,504 | 24 | 16 | 16 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 1.20796 | medium | very_long |
| 376 | 0xfaskety/Qwen-Qwen1.5-7B-1717028441 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 377 | 0xfaskety/Qwen-Qwen1.5-7B-1717033726 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 378 | 0xfaskety/Qwen-Qwen1.5-7B-1717386508 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 379 | 0xfaskety/Qwen-Qwen1.5-7B-1717391214 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 380 | 0xfaskety/Qwen-Qwen1.5-7B-1717395906 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 381 | 0xfaskety/Qwen-Qwen1.5-7B-1717408506 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 382 | 0xfaskety/Qwen-Qwen1.5-7B-1717413321 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 383 | 0xfaskety/Qwen-Qwen1.5-7B-1717664907 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.41.1 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 384 | 0xhelios/bert-finetuned-anzen-doc-classification | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.46.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 385 | 0xhzx/nv-qa | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.40.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 386 | 0xid/poca-SoccerTwos | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 387 | 0xk1h0/codegen1-6B-ds-zero3 | codegen | ["CodeGenForCausalLM"] | 51200 | float16 | 4.27.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 1 | 50256 | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 388 | 0xk1h0/codegen2-1B-ds-zero3 | codegen | ["CodeGenForCausalLM"] | 51200 | float16 | 4.32.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 1 | 2 | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 389 | 0xkrm/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 390 | 0xlexor/genesys_hf | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.40.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 391 | 0xling/Qwen3-0.6B-Gensyn-Swarm-skilled_voracious_gazelle | qwen3 | ["Qwen3ForCausalLM"] | 151936 | float32 | 4.51.3 | 1,024 | 3,072 | 28 | 16 | 8 | silu | 0 | True | 40,960 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.352322 | small | very_long |
| 392 | 0xnu/mnist-ocr | image-classification | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 393 | 0xr3d/vulnerable-ai | gpt2 | ["GPT2Model"] | 50257 | float32 | 4.42.4 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 394 | 0xroyce/NazareAI-CogniSynergy-DeepSeek | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
| 395 | 0xroyce/NazareAI-Senior-Marketing-Strategist | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
| 396 | 0xroyce/Plutus-Meta-Llama-3.1-8B-Instruct-bnb-4bit | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
| 397 | 0xshaf/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-smooth_webbed_barracuda | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 398 | 0xshaf/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-toothy_bipedal_dinosaur | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 399 | 0xsuid/simba-125M | gpt_neo | ["GPTNeoForCausalLM"] | 50257 | float32 | 4.25.1 | 768 | null | null | null | null | null | 0 | True | 2,048 | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | medium |
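For working with rows like the ones above programmatically rather than reading them in a table, a pandas sketch such as the following is enough. The parquet file name is hypothetical, standing in for however the split is exported or downloaded; the column names match the schema listed earlier.

```python
# Usage sketch: load rows shaped like the preview table and summarize them.
# "model_configs.parquet" is a hypothetical local export, not an official artifact.
import pandas as pd

df = pd.read_parquet("model_configs.parquet")

# How many models fall into each (model_type, size_category) bucket
print(df.groupby(["config_model_type", "size_category"]).size())

# Models that use grouped-query attention and have a very long context window
gqa_long = df[df["uses_gqa"] & (df["context_category"] == "very_long")]
print(gqa_long[["modelId", "config_num_attention_heads", "config_num_key_value_heads"]])
```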