Per-column statistics, one row per (dataset, config, split, column). Column types and value ranges over the full table:

| Column | Type | Range |
|---|---|---|
| dataset | string | lengths 5–115 |
| config | string | lengths 1–162 |
| split | string | lengths 1–228 |
| num_examples | int64 | 3–341M |
| column_name | string | lengths 0–77.9k |
| null_count | int64 | 0–62.9M |
| null_proportion | float64 | 0–1 |
| min | int64 | 0–9.25M |
| max | int64 | 0–1.07B |
| mean | float64 | 0–90.4M |
| median | float64 | 0–80.1M |
| std | float64 | 0–130M |
| histogram | dict | n/a |
| partial | bool | 2 classes |

| dataset | config | split | num_examples | column_name | null_count | null_proportion | min | max | mean | median | std | histogram | partial |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_date_understanding | 2024_07_22T09_32_37.006648 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_date_understanding | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_date_understanding | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_snarks | 2024_07_22T09_32_37.006648 | 178 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 178 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_snarks | 2024_07_22T09_32_37.006648 | 178 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 178 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_snarks | latest | 178 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 178 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_snarks | latest | 178 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 178 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_tracking_shuffled_objects_five_objects | 2024_07_22T09_32_37.006648 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_tracking_shuffled_objects_five_objects | 2024_07_22T09_32_37.006648 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_tracking_shuffled_objects_five_objects | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_tracking_shuffled_objects_five_objects | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_logical_deduction_seven_objects | 2024_07_22T09_32_37.006648 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_logical_deduction_seven_objects | 2024_07_22T09_32_37.006648 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_logical_deduction_seven_objects | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_logical_deduction_seven_objects | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_geometry_hard | 2024_07_22T09_32_37.006648 | 132 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 132 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_geometry_hard | 2024_07_22T09_32_37.006648 | 132 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 132 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_geometry_hard | 2024_07_22T09_32_37.006648 | 132 | target | 0 | 0 | 1 | 33 | 8.15152 | 7.5 | 6.45535 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 56, 11, 38, 15, 5, 5, 0, 1, 1 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_geometry_hard | 2024_07_22T09_32_37.006648 | 132 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 132 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_geometry_hard | latest | 132 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 132 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_geometry_hard | latest | 132 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 132 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_geometry_hard | latest | 132 | target | 0 | 0 | 1 | 33 | 8.15152 | 7.5 | 6.45535 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 56, 11, 38, 15, 5, 5, 0, 1, 1 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_geometry_hard | latest | 132 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 132 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_tracking_shuffled_objects_three_objects | 2024_07_22T09_32_37.006648 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_tracking_shuffled_objects_three_objects | 2024_07_22T09_32_37.006648 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_tracking_shuffled_objects_three_objects | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_tracking_shuffled_objects_three_objects | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_num_theory_hard | 2024_07_22T09_32_37.006648 | 154 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 154 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_num_theory_hard | 2024_07_22T09_32_37.006648 | 154 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 154 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_num_theory_hard | 2024_07_22T09_32_37.006648 | 154 | target | 0 | 0 | 1 | 15 | 2.68182 | 2 | 2.25703 | { "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 15 ], "hist": [ 99, 40, 7, 1, 2, 4, 0, 1 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_num_theory_hard | 2024_07_22T09_32_37.006648 | 154 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 154 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_num_theory_hard | latest | 154 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 154 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_num_theory_hard | latest | 154 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 154 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_num_theory_hard | latest | 154 | target | 0 | 0 | 1 | 15 | 2.68182 | 2 | 2.25703 | { "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 15 ], "hist": [ 99, 40, 7, 1, 2, 4, 0, 1 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_num_theory_hard | latest | 154 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 154 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_object_counting | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_object_counting | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_object_counting | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_object_counting | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_prealgebra_hard | 2024_07_22T09_32_37.006648 | 193 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 193 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_prealgebra_hard | 2024_07_22T09_32_37.006648 | 193 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 193 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_prealgebra_hard | 2024_07_22T09_32_37.006648 | 193 | target | 0 | 0 | 1 | 20 | 4.68394 | 3 | 4.33957 | { "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20 ], "hist": [ 87, 52, 10, 3, 6, 16, 13, 4, 1, 1 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_prealgebra_hard | 2024_07_22T09_32_37.006648 | 193 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 193 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_logical_deduction_three_objects | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_logical_deduction_three_objects | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_logical_deduction_three_objects | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_logical_deduction_three_objects | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_prealgebra_hard | latest | 193 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 193 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_prealgebra_hard | latest | 193 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 193 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_prealgebra_hard | latest | 193 | target | 0 | 0 | 1 | 20 | 4.68394 | 3 | 4.33957 | { "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20 ], "hist": [ 87, 52, 10, 3, 6, 16, 13, 4, 1, 1 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_prealgebra_hard | latest | 193 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 193 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_movie_recommendation | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_movie_recommendation | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_movie_recommendation | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_movie_recommendation | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_intermediate_algebra_hard | 2024_07_22T09_32_37.006648 | 280 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 280 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_intermediate_algebra_hard | 2024_07_22T09_32_37.006648 | 280 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 280 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_intermediate_algebra_hard | 2024_07_22T09_32_37.006648 | 280 | target | 0 | 0 | 1 | 41 | 7.19643 | 4 | 6.56323 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 41 ], "hist": [ 155, 39, 59, 17, 7, 1, 1, 0, 1 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_intermediate_algebra_hard | 2024_07_22T09_32_37.006648 | 280 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 280 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_intermediate_algebra_hard | latest | 280 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 280 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_intermediate_algebra_hard | latest | 280 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 280 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_intermediate_algebra_hard | latest | 280 | target | 0 | 0 | 1 | 41 | 7.19643 | 4 | 6.56323 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 41 ], "hist": [ 155, 39, 59, 17, 7, 1, 1, 0, 1 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_intermediate_algebra_hard | latest | 280 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 280 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_reasoning_about_colored_objects | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_reasoning_about_colored_objects | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_reasoning_about_colored_objects | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_reasoning_about_colored_objects | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_web_of_lies | 2024_07_22T09_32_37.006648 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_web_of_lies | 2024_07_22T09_32_37.006648 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_salient_translation_error_detection | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_salient_translation_error_detection | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_web_of_lies | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_bbh_web_of_lies | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_salient_translation_error_detection | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_salient_translation_error_detection | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_precalculus_hard | 2024_07_22T09_32_37.006648 | 135 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 135 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_precalculus_hard | 2024_07_22T09_32_37.006648 | 135 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 135 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_precalculus_hard | 2024_07_22T09_32_37.006648 | 135 | target | 0 | 0 | 1 | 51 | 12.45926 | 11 | 12.49284 | { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 51 ], "hist": [ 57, 36, 12, 10, 1, 9, 5, 4, 1 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_precalculus_hard | 2024_07_22T09_32_37.006648 | 135 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 135 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_snarks | 2024_07_16T14_09_56.652556 | 178 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 178 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_snarks | 2024_07_16T14_09_56.652556 | 178 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 178 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_precalculus_hard | latest | 135 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 135 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_precalculus_hard | latest | 135 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 135 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_precalculus_hard | latest | 135 | target | 0 | 0 | 1 | 51 | 12.45926 | 11 | 12.49284 | { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 51 ], "hist": [ 57, 36, 12, 10, 1, 9, 5, 4, 1 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_math_precalculus_hard | latest | 135 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 135 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_snarks | latest | 178 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 178 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_snarks | latest | 178 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 178 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_diamond | 2024_07_22T09_32_37.006648 | 198 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 198 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_diamond | 2024_07_22T09_32_37.006648 | 198 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 198 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_sports_understanding | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_sports_understanding | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_diamond | latest | 198 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 198 ] } | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_diamond | latest | 198 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 198 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_sports_understanding | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_sports_understanding | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_temporal_sequences | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_temporal_sequences | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_temporal_sequences | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_temporal_sequences | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_tracking_shuffled_objects_seven_objects | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 250 ] } | false |
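
Every row above uses the same per-column statistics layout: a null count and proportion, min/max, mean/median/std, and a fixed-bin histogram reported as bin edges plus per-bin counts. For the hash columns (`doc_hash`, `prompt_hash`, `target_hash`) the reported value is always 64 with zero spread, consistent with fixed-length 64-character hash strings, while the `target` columns show genuine variation. As a rough illustration of how statistics of this shape could be computed for a single column, here is a minimal pandas/NumPy sketch; the sample values, the `values` name, and the bin count of 8 are hypothetical, and this is not the code that produced the table.

```python
# Minimal sketch: per-column statistics in the same shape as the table above
# (null_count, null_proportion, min, max, mean, median, std, histogram).
# Illustrative only: the input values below are made up.
import numpy as np
import pandas as pd

# Hypothetical numeric column, e.g. the length of a "target" answer string.
values = pd.Series([1, 2, 3, 5, 8, 13, 21, 33, None])

null_count = int(values.isna().sum())
stats = {
    "null_count": null_count,
    "null_proportion": null_count / len(values),
    "min": float(values.min()),
    "max": float(values.max()),
    "mean": float(values.mean()),
    "median": float(values.median()),
    "std": float(values.std()),
}

# Fixed-bin histogram over the non-null values, reported as bin edges plus
# counts, mirroring the "histogram" dict column in the table.
hist, bin_edges = np.histogram(values.dropna(), bins=8)
stats["histogram"] = {"bin_edges": bin_edges.tolist(), "hist": hist.tolist()}

print(stats)
```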