Per-column statistics, one table row per (dataset, config, split, column). Schema and observed value ranges across the full table, of which this section is a slice:

- dataset: string (lengths 5–115)
- config: string (lengths 1–162)
- split: string (lengths 1–228)
- num_examples: int64 (3 to 341M)
- column_name: string (lengths 0 to 77.9k)
- null_count: int64 (0 to 62.9M)
- null_proportion: float64 (0 to 1)
- min: int64 (0 to 9.25M)
- max: int64 (0 to 1.07B)
- mean: float64 (0 to 90.4M)
- median: float64 (0 to 80.1M)
- std: float64 (0 to 130M)
- histogram: dict
- partial: bool (2 classes)

| dataset | config | split | num_examples | column_name | null_count | null_proportion | min | max | mean | median | std | histogram | partial |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_num_theory_hard | latest | 154 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [154]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_prealgebra_hard | 2024_07_21T07_14_53.671049 | 193 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [193]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_prealgebra_hard | 2024_07_21T07_14_53.671049 | 193 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [193]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_prealgebra_hard | 2024_07_21T07_14_53.671049 | 193 | target | 0 | 0 | 1 | 20 | 4.68394 | 3 | 4.33957 | {"bin_edges": [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20], "hist": [87, 52, 10, 3, 6, 16, 13, 4, 1, 1]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_prealgebra_hard | 2024_07_21T07_14_53.671049 | 193 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [193]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_prealgebra_hard | latest | 193 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [193]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_prealgebra_hard | latest | 193 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [193]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_prealgebra_hard | latest | 193 | target | 0 | 0 | 1 | 20 | 4.68394 | 3 | 4.33957 | {"bin_edges": [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20], "hist": [87, 52, 10, 3, 6, 16, 13, 4, 1, 1]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_prealgebra_hard | latest | 193 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [193]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_bbh_geometric_shapes | 2024_07_21T07_14_53.671049 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_bbh_geometric_shapes | 2024_07_21T07_14_53.671049 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_bbh_geometric_shapes | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_bbh_geometric_shapes | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_precalculus_hard | 2024_07_21T07_14_53.671049 | 135 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [135]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_precalculus_hard | 2024_07_21T07_14_53.671049 | 135 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [135]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_precalculus_hard | 2024_07_21T07_14_53.671049 | 135 | target | 0 | 0 | 1 | 51 | 12.45926 | 11 | 12.49284 | {"bin_edges": [1, 7, 13, 19, 25, 31, 37, 43, 49, 51], "hist": [57, 36, 12, 10, 1, 9, 5, 4, 1]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_precalculus_hard | 2024_07_21T07_14_53.671049 | 135 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [135]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_precalculus_hard | latest | 135 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [135]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_precalculus_hard | latest | 135 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [135]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_precalculus_hard | latest | 135 | target | 0 | 0 | 1 | 51 | 12.45926 | 11 | 12.49284 | {"bin_edges": [1, 7, 13, 19, 25, 31, 37, 43, 49, 51], "hist": [57, 36, 12, 10, 1, 9, 5, 4, 1]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_precalculus_hard | latest | 135 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [135]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_mmlu_pro | latest | 12,032 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [12032]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_mmlu_pro | latest | 12,032 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [12032]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_mmlu_pro | 2024_07_21T07_14_53.671049 | 12,032 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [12032]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_mmlu_pro | 2024_07_21T07_14_53.671049 | 12,032 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [12032]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_murder_mysteries | 2024_07_21T07_14_53.671049 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_murder_mysteries | 2024_07_21T07_14_53.671049 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_murder_mysteries | 2024_07_21T07_14_53.671049 | 250 | target | 0 | 0 | 3 | 10 | 5.9 | 6 | 1.51896 | {"bin_edges": [3, 4, 5, 6, 7, 8, 9, 10, 10], "hist": [16, 27, 57, 66, 49, 22, 11, 2]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_murder_mysteries | 2024_07_21T07_14_53.671049 | 250 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_murder_mysteries | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_murder_mysteries | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_murder_mysteries | latest | 250 | target | 0 | 0 | 3 | 10 | 5.9 | 6 | 1.51896 | {"bin_edges": [3, 4, 5, 6, 7, 8, 9, 10, 10], "hist": [16, 27, 57, 66, 49, 22, 11, 2]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_murder_mysteries | latest | 250 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_intermediate_algebra_hard | 2024_07_21T07_14_53.671049 | 280 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [280]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_intermediate_algebra_hard | 2024_07_21T07_14_53.671049 | 280 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [280]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_intermediate_algebra_hard | 2024_07_21T07_14_53.671049 | 280 | target | 0 | 0 | 1 | 41 | 7.19643 | 4 | 6.56323 | {"bin_edges": [1, 6, 11, 16, 21, 26, 31, 36, 41, 41], "hist": [155, 39, 59, 17, 7, 1, 1, 0, 1]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_intermediate_algebra_hard | 2024_07_21T07_14_53.671049 | 280 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [280]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_intermediate_algebra_hard | latest | 280 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [280]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_intermediate_algebra_hard | latest | 280 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [280]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_intermediate_algebra_hard | latest | 280 | target | 0 | 0 | 1 | 41 | 7.19643 | 4 | 6.56323 | {"bin_edges": [1, 6, 11, 16, 21, 26, 31, 36, 41, 41], "hist": [155, 39, 59, 17, 7, 1, 1, 0, 1]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_math_intermediate_algebra_hard | latest | 280 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [280]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_bbh_movie_recommendation | 2024_07_21T07_14_53.671049 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_bbh_movie_recommendation | 2024_07_21T07_14_53.671049 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_bbh_movie_recommendation | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_bbh_movie_recommendation | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_object_placements | 2024_07_21T07_14_53.671049 | 256 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_object_placements | 2024_07_21T07_14_53.671049 | 256 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_object_placements | 2024_07_21T07_14_53.671049 | 256 | target | 0 | 0 | 3 | 21 | 10.20312 | 11 | 4.32477 | {"bin_edges": [3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 21], "hist": [33, 42, 19, 31, 41, 49, 22, 11, 7, 1]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_object_placements | 2024_07_21T07_14_53.671049 | 256 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_object_placements | latest | 256 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_object_placements | latest | 256 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_object_placements | latest | 256 | target | 0 | 0 | 3 | 21 | 10.20312 | 11 | 4.32477 | {"bin_edges": [3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 21], "hist": [33, 42, 19, 31, 41, 49, 22, 11, 7, 1]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_object_placements | latest | 256 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_team_allocation | 2024_07_21T07_14_53.671049 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_team_allocation | 2024_07_21T07_14_53.671049 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_team_allocation | 2024_07_21T07_14_53.671049 | 250 | target | 0 | 0 | 37 | 89 | 58.508 | 58 | 10.91377 | {"bin_edges": [37, 43, 49, 55, 61, 67, 73, 79, 85, 89], "hist": [17, 30, 43, 54, 52, 29, 12, 9, 4]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_team_allocation | 2024_07_21T07_14_53.671049 | 250 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_team_allocation | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_team_allocation | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_team_allocation | latest | 250 | target | 0 | 0 | 37 | 89 | 58.508 | 58 | 10.91377 | {"bin_edges": [37, 43, 49, 55, 61, 67, 73, 79, 85, 89], "hist": [17, 30, 43, 54, 52, 29, 12, 9, 4]} | false |
| open-llm-leaderboard/VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct-details | VAGOsolutions__Llama-3-SauerkrautLM-70b-Instruct__leaderboard_musr_team_allocation | latest | 250 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| TalonMG/NL2Cypher-SpCQL | default | test | 2,007 | answer | 0 | 0 | 10 | 14,256,105 | 12,384.25112 | 63 | 331,401.2712 | {"bin_edges": [10, 1425620, 2851230, 4276840, 5702450, 7128060, 8553670, 9979280, 11404890, 12830500, 14256105], "hist": [2005, 0, 1, 0, 0, 0, 0, 0, 0, 1]} | false |
| TalonMG/NL2Cypher-SpCQL | default | test | 2,007 | cypher | 0 | 0 | 43 | 287 | 107.56652 | 101 | 31.80293 | {"bin_edges": [43, 68, 93, 118, 143, 168, 193, 218, 243, 268, 287], "hist": [110, 665, 536, 493, 103, 53, 38, 2, 1, 6]} | false |
| TalonMG/NL2Cypher-SpCQL | default | test | 2,007 | query | 0 | 0 | 6 | 124 | 24.63627 | 23 | 9.8798 | {"bin_edges": [6, 18, 30, 42, 54, 66, 78, 90, 102, 114, 124], "hist": [437, 1096, 364, 75, 29, 5, 0, 0, 0, 1]} | false |
| TalonMG/NL2Cypher-SpCQL | default | train | 7,001 | answer | 0 | 0 | 10 | 7,305,592 | 6,938.27053 | 62 | 149,580.25425 | {"bin_edges": [10, 730569, 1461128, 2191687, 2922246, 3652805, 4383364, 5113923, 5844482, 6575041, 7305592], "hist": [6985, 5, 6, 1, 2, 0, 0, 0, 0, 2]} | false |
| TalonMG/NL2Cypher-SpCQL | default | train | 7,001 | cypher | 0 | 0 | 41 | 313 | 107.96001 | 101 | 33.36412 | {"bin_edges": [41, 69, 97, 125, 153, 181, 209, 237, 265, 293, 313], "hist": [365, 2756, 2039, 1304, 274, 180, 34, 2, 43, 4]} | false |
| TalonMG/NL2Cypher-SpCQL | default | train | 7,001 | query | 0 | 0 | 6 | 113 | 24.57249 | 23 | 9.70846 | {"bin_edges": [6, 17, 28, 39, 50, 61, 72, 83, 94, 105, 113], "hist": [1245, 3660, 1577, 332, 149, 25, 6, 4, 1, 2]} | false |
| TalonMG/NL2Cypher-SpCQL | default | validation | 1,000 | answer | 0 | 0 | 10 | 4,772,073 | 10,482.575 | 67 | 182,377.07147 | {"bin_edges": [10, 477217, 954424, 1431631, 1908838, 2386045, 2863252, 3340459, 3817666, 4294873, 4772073], "hist": [996, 2, 0, 0, 0, 0, 1, 0, 0, 1]} | false |
| TalonMG/NL2Cypher-SpCQL | default | validation | 1,000 | cypher | 0 | 0 | 43 | 282 | 108.471 | 103 | 33.79641 | {"bin_edges": [43, 67, 91, 115, 139, 163, 187, 211, 235, 259, 282], "hist": [51, 298, 265, 261, 56, 41, 18, 2, 1, 7]} | false |
| TalonMG/NL2Cypher-SpCQL | default | validation | 1,000 | query | 0 | 0 | 6 | 89 | 24.346 | 23 | 9.81349 | {"bin_edges": [6, 15, 24, 33, 42, 51, 60, 69, 78, 87, 89], "hist": [115, 417, 310, 108, 26, 16, 5, 1, 1, 1]} | false |
| ebayes/uhura-eval-harness | am_generation | test | 77 | question | 0 | 0 | 23 | 861 | 163.94805 | 56 | 259.20368 | {"bin_edges": [23, 107, 191, 275, 359, 443, 527, 611, 695, 779, 861], "hist": [64, 0, 0, 0, 1, 3, 0, 0, 0, 9]} | false |
| ebayes/uhura-eval-harness | am_multiple_choice | test | 77 | question | 0 | 0 | 23 | 861 | 163.94805 | 56 | 259.20368 | {"bin_edges": [23, 107, 191, 275, 359, 443, 527, 611, 695, 779, 861], "hist": [64, 0, 0, 0, 1, 3, 0, 0, 0, 9]} | false |
| ebayes/uhura-eval-harness | ha_multiple_choice | test | 155 | question | 0 | 0 | 14 | 1,925 | 778.39355 | 822 | 740.44698 | {"bin_edges": [14, 206, 398, 590, 782, 974, 1166, 1358, 1550, 1742, 1925], "hist": [76, 1, 0, 0, 5, 4, 17, 12, 25, 15]} | false |
| ebayes/uhura-eval-harness | ha_generation | test | 155 | question | 0 | 0 | 14 | 1,925 | 778.39355 | 822 | 740.44698 | {"bin_edges": [14, 206, 398, 590, 782, 974, 1166, 1358, 1550, 1742, 1925], "hist": [76, 1, 0, 0, 5, 4, 17, 12, 25, 15]} | false |
| ebayes/uhura-eval-harness | nso_generation | test | 14 | question | 0 | 0 | 24 | 1,161 | 192.28571 | 47 | 361.63973 | {"bin_edges": [24, 138, 252, 366, 480, 594, 708, 822, 936, 1050, 1161], "hist": [12, 0, 0, 0, 0, 0, 0, 1, 0, 1]} | false |
| ebayes/uhura-eval-harness | nso_multiple_choice | test | 14 | question | 0 | 0 | 24 | 1,161 | 192.28571 | 47 | 361.63973 | {"bin_edges": [24, 138, 252, 366, 480, 594, 708, 822, 936, 1050, 1161], "hist": [12, 0, 0, 0, 0, 0, 0, 1, 0, 1]} | false |
| ebayes/uhura-eval-harness | sw_generation | test | 23 | question | 0 | 0 | 145 | 4,011 | 3,720.52174 | 3,917 | 787.74228 | {"bin_edges": [145, 532, 919, 1306, 1693, 2080, 2467, 2854, 3241, 3628, 4011], "hist": [1, 0, 0, 0, 0, 0, 0, 0, 0, 22]} | false |
| ebayes/uhura-eval-harness | sw_multiple_choice | test | 23 | question | 0 | 0 | 145 | 4,011 | 3,720.52174 | 3,917 | 787.74228 | {"bin_edges": [145, 532, 919, 1306, 1693, 2080, 2467, 2854, 3241, 3628, 4011], "hist": [1, 0, 0, 0, 0, 0, 0, 0, 0, 22]} | false |
| ebayes/uhura-eval-harness | yo_generation | test | 258 | question | 0 | 0 | 423 | 2,108 | 1,102.83721 | 889.5 | 516.11801 | {"bin_edges": [423, 592, 761, 930, 1099, 1268, 1437, 1606, 1775, 1944, 2108], "hist": [27, 77, 42, 22, 5, 0, 13, 37, 13, 22]} | false |
| ebayes/uhura-eval-harness | yo_multiple_choice | test | 258 | question | 0 | 0 | 423 | 2,108 | 1,102.83721 | 889.5 | 516.11801 | {"bin_edges": [423, 592, 761, 930, 1099, 1268, 1437, 1606, 1775, 1944, 2108], "hist": [27, 77, 42, 22, 5, 0, 13, 37, 13, 22]} | false |
| ebayes/uhura-eval-harness | zu_generation | test | 61 | question | 0 | 0 | 19 | 904 | 108.98361 | 52 | 192.01341 | {"bin_edges": [19, 108, 197, 286, 375, 464, 553, 642, 731, 820, 904], "hist": [52, 4, 0, 1, 1, 0, 0, 0, 0, 3]} | false |
| ebayes/uhura-eval-harness | zu_multiple_choice | test | 61 | question | 0 | 0 | 19 | 904 | 108.98361 | 52 | 192.01341 | {"bin_edges": [19, 108, 197, 286, 375, 464, 553, 642, 731, 820, 904], "hist": [52, 4, 0, 1, 1, 0, 0, 0, 0, 3]} | false |
| mayankchugh-learning/RAG-investment-recommendation-log | default | train | 39 | model_response | 0 | 0 | 169 | 12,927 | 1,433.97436 | 1,177 | 2,052.29332 | {"bin_edges": [169, 1445, 2721, 3997, 5273, 6549, 7825, 9101, 10377, 11653, 12927], "hist": [26, 10, 2, 0, 0, 0, 0, 0, 0, 1]} | false |
| mayankchugh-learning/RAG-investment-recommendation-log | default | train | 39 | retrieved_context | 0 | 0 | 0 | 12,794 | 3,173.51282 | 0 | 5,258.68479 | {"bin_edges": [0, 1280, 2560, 3840, 5120, 6400, 7680, 8960, 10240, 11520, 12794], "hist": [28, 0, 0, 0, 1, 0, 0, 2, 0, 8]} | false |
| mayankchugh-learning/RAG-investment-recommendation-log | default | train | 39 | user_input | 0 | 0 | 8 | 150 | 107.94872 | 137 | 42.16443 | {"bin_edges": [8, 23, 38, 53, 68, 83, 98, 113, 128, 143, 150], "hist": [2, 1, 1, 1, 9, 1, 0, 0, 17, 7]} | false |
| beier3/umltocode | default | train | 105 | input | 0 | 0 | 214 | 2,016 | 969.19048 | 908 | 457.04553 | {"bin_edges": [214, 395, 576, 757, 938, 1119, 1300, 1481, 1662, 1843, 2016], "hist": [11, 9, 15, 18, 18, 11, 7, 6, 5, 5]} | false |
| beier3/umltocode | default | train | 105 | output | 0 | 0 | 431 | 4,370 | 1,761.14286 | 1,612 | 822.13191 | {"bin_edges": [431, 825, 1219, 1613, 2007, 2401, 2795, 3189, 3583, 3977, 4370], "hist": [12, 9, 32, 23, 15, 5, 0, 4, 1, 4]} | false |
| jdpressman/retroinstruct-mix-v0.2 | default | train | 23,938 | inputs | 0 | 0 | 20 | 24,720 | 2,131.93784 | 695.5 | 2,701.16398 | {"bin_edges": [20, 2491, 4962, 7433, 9904, 12375, 14846, 17317, 19788, 22259, 24720], "hist": [17330, 1971, 2818, 1725, 93, 0, 0, 0, 0, 1]} | false |
| jdpressman/retroinstruct-mix-v0.2 | default | train | 23,938 | targets | 0 | 0 | 2 | 14,194 | 2,700.8494 | 1,947 | 2,187.66368 | {"bin_edges": [2, 1422, 2842, 4262, 5682, 7102, 8522, 9942, 11362, 12782, 14194], "hist": [10623, 2714, 3288, 4426, 2571, 163, 87, 50, 12, 4]} | false |
| jdpressman/retroinstruct-mix-v0.2 | default | validation | 2,660 | inputs | 0 | 0 | 30 | 10,647 | 2,120.40639 | 643 | 2,688.25845 | {"bin_edges": [30, 1092, 2154, 3216, 4278, 5340, 6402, 7464, 8526, 9588, 10647], "hist": [1495, 311, 222, 94, 60, 118, 159, 132, 52, 17]} | false |
| jdpressman/retroinstruct-mix-v0.2 | default | validation | 2,660 | targets | 0 | 0 | 2 | 13,382 | 2,675.7203 | 1,954.5 | 2,154.32752 | {"bin_edges": [2, 1341, 2680, 4019, 5358, 6697, 8036, 9375, 10714, 12053, 13382], "hist": [1167, 327, 314, 504, 312, 25, 5, 5, 0, 1]} | false |
| 5CD-AI/Vietnamese-AilinWhiteNight-ranker_thv2-gg-translated | default | test | 6,795 | query_thai | 0 | 0 | 12 | 273 | 83.21398 | 71 | 48.67232 | {"bin_edges": [12, 39, 66, 93, 120, 147, 174, 201, 228, 255, 273], "hist": [1349, 1753, 1177, 884, 772, 463, 271, 102, 12, 12]} | false |
| 5CD-AI/Vietnamese-AilinWhiteNight-ranker_thv2-gg-translated | default | test | 6,795 | query_vi | 0 | 0 | 10 | 316 | 93.9897 | 81 | 54.87792 | {"bin_edges": [10, 41, 72, 103, 134, 165, 196, 227, 258, 289, 316], "hist": [1174, 1795, 1231, 962, 758, 488, 300, 60, 21, 6]} | false |
| 5CD-AI/Vietnamese-AilinWhiteNight-ranker_thv2-gg-translated | default | validation | 4,030 | query_thai | 0 | 0 | 7 | 255 | 76.46179 | 62 | 46.68192 | {"bin_edges": [7, 32, 57, 82, 107, 132, 157, 182, 207, 232, 255], "hist": [526, 1333, 683, 464, 392, 342, 173, 87, 27, 3]} | false |
| 5CD-AI/Vietnamese-AilinWhiteNight-ranker_thv2-gg-translated | default | validation | 4,030 | query_vi | 0 | 0 | 11 | 279 | 85.52878 | 70 | 52.32904 | {"bin_edges": [11, 38, 65, 92, 119, 146, 173, 200, 227, 254, 279], "hist": [747, 1122, 636, 499, 393, 331, 185, 78, 30, 9]} | false |
| 5CD-AI/Vietnamese-AilinWhiteNight-ranker_thv2-gg-translated | default | train | 398,398 | query_thai | 0 | 0 | 4 | 1,729 | 115.06437 | 99 | 81.42693 | {"bin_edges": [4, 177, 350, 523, 696, 869, 1042, 1215, 1388, 1561, 1729], "hist": [325762, 67222, 4395, 675, 207, 62, 42, 21, 6, 6]} | false |
| 5CD-AI/Vietnamese-AilinWhiteNight-ranker_thv2-gg-translated | default | train | 398,398 | query_vi | 0 | 0 | 1 | 1,703 | 113.94432 | 98 | 79.86298 | {"bin_edges": [1, 172, 343, 514, 685, 856, 1027, 1198, 1369, 1540, 1703], "hist": [322295, 70567, 4557, 637, 203, 67, 42, 21, 0, 9]} | false |
| Gutema/image-insta | default | train | 109 | text | 0 | 0 | 37 | 117 | 80.57798 | 82 | 15.89858 | {"bin_edges": [37, 46, 55, 64, 73, 82, 91, 100, 109, 117], "hist": [2, 1, 12, 22, 17, 24, 18, 9, 4]} | false |
| We1ltbummler/taitan_test | default | train | 632 | Image URL | 0 | 0 | 78 | 81 | 79.54589 | 80 | 0.83947 | {"bin_edges": [78, 79, 80, 81, 81], "hist": [143, 2, 486, 1]} | false |
| We1ltbummler/taitan_test | default | train | 632 | Model Number | 0 | 0 | 6 | 46 | 11.74525 | 12 | 2.62862 | {"bin_edges": [6, 11, 16, 21, 26, 31, 36, 41, 46, 46], "hist": [113, 496, 18, 4, 0, 0, 0, 0, 1]} | false |
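
Every histogram cell above has the same shape: "bin_edges" carries one more entry than "hist", with hist[i] counting values between bin_edges[i] and bin_edges[i+1] (constant columns such as the 64-character hash fields collapse to a single degenerate bin, e.g. {"bin_edges": [64, 64], "hist": [154]}). Below is a minimal Python sketch for decoding one cell. The cell literal is copied from the musr_murder_mysteries target rows; the half-open-bin reading is an assumption that is consistent with the table (the counts sum to the row's num_examples) rather than something the table states.

```python
import json

# Histogram cell copied verbatim from the musr_murder_mysteries "target" rows above.
cell = '{ "bin_edges": [ 3, 4, 5, 6, 7, 8, 9, 10, 10 ], "hist": [ 16, 27, 57, 66, 49, 22, 11, 2 ] }'

h = json.loads(cell)
edges, counts = h["bin_edges"], h["hist"]
assert len(edges) == len(counts) + 1  # one more edge than bins

total = sum(counts)  # 250 here, matching the row's num_examples (null_count is 0)
for lo, hi, n in zip(edges, edges[1:], counts):
    print(f"[{lo}, {hi}): {n:3d}  ({n / total:.1%})")
```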
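
Rows shaped like these (per-column min/max/mean/median/std plus a histogram object, alongside num_examples and partial) match what the Hugging Face datasets-server reports per split. A hedged sketch of fetching the same statistics directly follows; the endpoint URL and response field names are assumptions based on the public datasets-server API and are not confirmed by this table.

```python
import requests

# Assumed datasets-server statistics endpoint; dataset/config/split taken from a row above.
resp = requests.get(
    "https://datasets-server.huggingface.co/statistics",
    params={"dataset": "TalonMG/NL2Cypher-SpCQL", "config": "default", "split": "test"},
    timeout=30,
)
resp.raise_for_status()
payload = resp.json()

# Assumed field layout: a list of per-column entries with nested statistics.
for col in payload["statistics"]:
    stats = col["column_statistics"]
    print(col["column_name"], stats.get("min"), stats.get("max"), stats.get("mean"))
```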