Column schema of the statistics table (dtype and observed value range per column, as reported by the dataset viewer):

| column | dtype | observed range |
|---|---|---|
| dataset | string | lengths 5–115 |
| config | string | lengths 1–162 |
| split | string | lengths 1–228 |
| num_examples | int64 | 3 – 341M |
| column_name | string | lengths 0–77.9k |
| null_count | int64 | 0 – 62.9M |
| null_proportion | float64 | 0 – 1 |
| min | int64 | 0 – 9.25M |
| max | int64 | 0 – 1.07B |
| mean | float64 | 0 – 90.4M |
| median | float64 | 0 – 80.1M |
| std | float64 | 0 – 130M |
| histogram | dict | bin_edges and hist arrays |
| partial | bool | 2 classes |
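Each row below reports descriptive statistics over the value lengths of one column in one split: null count and proportion, min, max, mean, median, standard deviation, and a small histogram. As a rough illustration of how one such row could be produced, here is a minimal Python sketch using NumPy; the `string_length_stats` helper and its binning are assumptions made for illustration, not the pipeline that actually generated these numbers.

```python
# Minimal sketch: compute per-column string-length statistics in the same
# shape as the rows of the table below. The field names mirror the table;
# the exact binning of the original pipeline is an assumption.
import json
import numpy as np

def string_length_stats(values, n_bins=10):
    """Summarise the lengths of a list of (possibly missing) strings."""
    null_count = sum(v is None for v in values)
    lengths = np.array([len(v) for v in values if v is not None])
    if lengths.min() == lengths.max():
        # Constant-length column (e.g. a hash column): a single bin,
        # which is why those rows show bin_edges like [64, 64].
        edges = [int(lengths.min()), int(lengths.max())]
        hist = [int(lengths.size)]
    else:
        counts, bin_edges = np.histogram(lengths, bins=n_bins)
        edges = [int(e) for e in bin_edges]  # the table uses integer edges
        hist = [int(c) for c in counts]
    return {
        "num_examples": len(values),
        "null_count": null_count,
        "null_proportion": null_count / len(values),
        "min": int(lengths.min()),
        "max": int(lengths.max()),
        "mean": round(float(lengths.mean()), 5),
        "median": float(np.median(lengths)),
        "std": round(float(lengths.std(ddof=1)), 5),
        "histogram": {"bin_edges": edges, "hist": hist},
    }

if __name__ == "__main__":
    sample = ["a" * 64] * 256  # a fixed-length column, like the *_hash columns
    print(json.dumps(string_length_stats(sample), indent=2))
```

The `*_hash` columns contain fixed-length 64-character strings, so their rows collapse to min = max = mean = median = 64, std = 0, and a single histogram bin. Each row in the table below follows this shape.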
| dataset | config | split | num_examples | column_name | null_count | null_proportion | min | max | mean | median | std | histogram | partial |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_object_placements | latest | 256 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_object_placements | latest | 256 | target | 0 | 0 | 3 | 21 | 10.20312 | 11 | 4.32477 | {"bin_edges": [3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 21], "hist": [33, 42, 19, 31, 41, 49, 22, 11, 7, 1]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_object_placements | latest | 256 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_mmlu_pro | 2024_07_16T14_09_56.652556 | 12,032 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [12032]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_mmlu_pro | 2024_07_16T14_09_56.652556 | 12,032 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [12032]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_mmlu_pro | latest | 12,032 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [12032]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_mmlu_pro | latest | 12,032 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [12032]} | false |
| soikit/cai_dataset_5_text_coloumn | default | test | 141 | text | 0 | 0 | 223 | 826 | 388.94326 | 384 | 103.88674 | {"bin_edges": [223, 284, 345, 406, 467, 528, 589, 650, 711, 772, 826], "hist": [23, 22, 37, 33, 16, 5, 2, 1, 1, 1]} | false |
| soikit/cai_dataset_5_text_coloumn | default | train | 562 | text | 0 | 0 | 210 | 860 | 413.50178 | 408 | 110.63726 | {"bin_edges": [210, 276, 342, 408, 474, 540, 606, 672, 738, 804, 860], "hist": [55, 104, 121, 141, 79, 33, 14, 9, 2, 4]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_team_allocation | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_team_allocation | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_team_allocation | 2024_07_16T14_09_56.652556 | 250 | target | 0 | 0 | 37 | 89 | 58.508 | 58 | 10.91377 | {"bin_edges": [37, 43, 49, 55, 61, 67, 73, 79, 85, 89], "hist": [17, 30, 43, 54, 52, 29, 12, 9, 4]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_team_allocation | 2024_07_16T14_09_56.652556 | 250 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_team_allocation | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_team_allocation | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_team_allocation | latest | 250 | target | 0 | 0 | 37 | 89 | 58.508 | 58 | 10.91377 | {"bin_edges": [37, 43, 49, 55, 61, 67, 73, 79, 85, 89], "hist": [17, 30, 43, 54, 52, 29, 12, 9, 4]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_musr_team_allocation | latest | 250 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_math_geometry_hard | 2024_07_16T14_09_56.652556 | 132 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [132]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_math_geometry_hard | 2024_07_16T14_09_56.652556 | 132 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [132]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_math_geometry_hard | 2024_07_16T14_09_56.652556 | 132 | target | 0 | 0 | 1 | 33 | 8.15152 | 7.5 | 6.45535 | {"bin_edges": [1, 5, 9, 13, 17, 21, 25, 29, 33, 33], "hist": [56, 11, 38, 15, 5, 5, 0, 1, 1]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_math_geometry_hard | 2024_07_16T14_09_56.652556 | 132 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [132]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_math_geometry_hard | latest | 132 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [132]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_math_geometry_hard | latest | 132 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [132]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_math_geometry_hard | latest | 132 | target | 0 | 0 | 1 | 33 | 8.15152 | 7.5 | 6.45535 | {"bin_edges": [1, 5, 9, 13, 17, 21, 25, 29, 33, 33], "hist": [56, 11, 38, 15, 5, 5, 0, 1, 1]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_math_geometry_hard | latest | 132 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [132]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_boolean_expressions | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_boolean_expressions | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_boolean_expressions | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_boolean_expressions | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_causal_judgement | 2024_07_16T14_09_56.652556 | 187 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [187]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_causal_judgement | 2024_07_16T14_09_56.652556 | 187 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [187]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_causal_judgement | latest | 187 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [187]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_causal_judgement | latest | 187 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [187]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_disambiguation_qa | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_disambiguation_qa | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| DIBT/MPEP_MALAGASY | default | train | 26 | external_id | 0 | 0 | 2 | 4 | 3.34615 | 3 | 0.56159 | {"bin_edges": [2, 3, 4, 4], "hist": [1, 15, 10]} | false |
| DIBT/MPEP_MALAGASY | default | train | 26 | metadata | 0 | 0 | 66 | 904 | 125 | 94 | 170.45445 | {"bin_edges": [66, 150, 234, 318, 402, 486, 570, 654, 738, 822, 904], "hist": [24, 0, 0, 1, 0, 0, 0, 0, 0, 1]} | false |
| DIBT/MPEP_MALAGASY | default | train | 26 | source | 0 | 0 | 25 | 5,439 | 391.26923 | 111 | 1,046.478 | {"bin_edges": [25, 567, 1109, 1651, 2193, 2735, 3277, 3819, 4361, 4903, 5439], "hist": [24, 1, 0, 0, 0, 0, 0, 0, 0, 1]} | false |
| DIBT/MPEP_MALAGASY | default | train | 26 | target-suggestion | 0 | 0 | 35 | 771 | 233.07692 | 133 | 234.5496 | {"bin_edges": [35, 109, 183, 257, 331, 405, 479, 553, 627, 701, 771], "hist": [10, 6, 3, 2, 0, 0, 0, 2, 1, 2]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_disambiguation_qa | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_disambiguation_qa | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_geometric_shapes | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_geometric_shapes | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_geometric_shapes | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_geometric_shapes | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_logical_deduction_five_objects | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_logical_deduction_five_objects | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_logical_deduction_five_objects | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_logical_deduction_five_objects | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_date_understanding | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_date_understanding | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_date_understanding | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_date_understanding | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_ruin_names | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_ruin_names | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_ruin_names | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_ruin_names | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| amanagg/Llama2-model-sample-datadet | default | train | 1,000 | text | 0 | 0 | 58 | 11,443 | 1,499.042 | 1,206.5 | 1,255.75382 | {"bin_edges": [58, 1197, 2336, 3475, 4614, 5753, 6892, 8031, 9170, 10309, 11443], "hist": [495, 328, 116, 35, 12, 9, 2, 1, 0, 2]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_web_of_lies | 2024_07_16T14_09_56.652556 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_web_of_lies | 2024_07_16T14_09_56.652556 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_web_of_lies | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| SaylorTwift/gpt2 | gpt2__leaderboard_bbh_web_of_lies | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| littlejohn-ai/spa-law-qa | default | train | 23,725 | answer | 0 | 0 | 1 | 1,246 | 96.1117 | 87 | 62.94482 | {"bin_edges": [1, 126, 251, 376, 501, 626, 751, 876, 1001, 1126, 1246], "hist": [17820, 5441, 362, 63, 24, 6, 6, 1, 1, 1]} | false |
| littlejohn-ai/spa-law-qa | default | train | 23,725 | context | 0 | 0 | 121 | 2,232 | 288.02626 | 255 | 142.43329 | {"bin_edges": [121, 333, 545, 757, 969, 1181, 1393, 1605, 1817, 2029, 2232], "hist": [16836, 5568, 1077, 169, 54, 10, 3, 5, 1, 2]} | false |
| littlejohn-ai/spa-law-qa | default | train | 23,725 | question | 0 | 0 | 14 | 496 | 81.23684 | 75 | 35.76486 | {"bin_edges": [14, 63, 112, 161, 210, 259, 308, 357, 406, 455, 496], "hist": [7938, 11856, 3157, 623, 103, 34, 8, 5, 0, 1]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_main | 2024_07_22T09_32_37.006648 | 448 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [448]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_main | 2024_07_22T09_32_37.006648 | 448 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [448]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_main | latest | 448 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [448]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_main | latest | 448 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [448]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_extended | 2024_07_22T09_32_37.006648 | 546 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [546]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_extended | 2024_07_22T09_32_37.006648 | 546 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [546]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_extended | latest | 546 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [546]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_gpqa_extended | latest | 546 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [546]} | false |
| coastalcph/LocalizedNarratives | default | valid | 41,691 | caption | 0 | 0 | 21 | 981 | 146.49745 | 127 | 83.8033 | {"bin_edges": [21, 118, 215, 312, 409, 506, 603, 700, 797, 894, 981], "hist": [18450, 16414, 4791, 1413, 453, 121, 37, 7, 3, 2]} | false |
| coastalcph/LocalizedNarratives | default | valid | 41,691 | image_id | 0 | 0 | 16 | 16 | 16 | 16 | 0 | {"bin_edges": [16, 16], "hist": [41691]} | false |
| coastalcph/LocalizedNarratives | default | test | 126,020 | caption | 0 | 0 | 19 | 1,319 | 150.10613 | 129 | 87.43261 | {"bin_edges": [19, 150, 281, 412, 543, 674, 805, 936, 1067, 1198, 1319], "hist": [75854, 40327, 7786, 1589, 346, 81, 25, 8, 3, 1]} | false |
| coastalcph/LocalizedNarratives | default | test | 126,020 | image_id | 0 | 0 | 16 | 16 | 16 | 16 | 0 | {"bin_edges": [16, 16], "hist": [126020]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_team_allocation | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_team_allocation | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_team_allocation | latest | 250 | target | 0 | 0 | 37 | 89 | 58.508 | 58 | 10.91377 | {"bin_edges": [37, 43, 49, 55, 61, 67, 73, 79, 85, 89], "hist": [17, 30, 43, 54, 52, 29, 12, 9, 4]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_team_allocation | latest | 250 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_team_allocation | 2024_07_22T09_32_37.006648 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_team_allocation | 2024_07_22T09_32_37.006648 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_team_allocation | 2024_07_22T09_32_37.006648 | 250 | target | 0 | 0 | 37 | 89 | 58.508 | 58 | 10.91377 | {"bin_edges": [37, 43, 49, 55, 61, 67, 73, 79, 85, 89], "hist": [17, 30, 43, 54, 52, 29, 12, 9, 4]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_team_allocation | 2024_07_22T09_32_37.006648 | 250 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_object_placements | latest | 256 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_object_placements | latest | 256 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_object_placements | latest | 256 | target | 0 | 0 | 3 | 21 | 10.20312 | 11 | 4.32477 | {"bin_edges": [3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 21], "hist": [33, 42, 19, 31, 41, 49, 22, 11, 7, 1]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_object_placements | latest | 256 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_object_placements | 2024_07_22T09_32_37.006648 | 256 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_object_placements | 2024_07_22T09_32_37.006648 | 256 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_object_placements | 2024_07_22T09_32_37.006648 | 256 | target | 0 | 0 | 3 | 21 | 10.20312 | 11 | 4.32477 | {"bin_edges": [3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 21], "hist": [33, 42, 19, 31, 41, 49, 22, 11, 7, 1]} | false |
| open-llm-leaderboard/maldv__badger-mu-llama-3-8b-details | maldv__badger-mu-llama-3-8b__leaderboard_musr_object_placements | 2024_07_22T09_32_37.006648 | 256 | target_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [256]} | false |
| open-llm-leaderboard/Eric111__CatunaMayo-DPO-details | Eric111__CatunaMayo-DPO__leaderboard_bbh_boolean_expressions | 2024_07_22T09_46_39.319670 | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/Eric111__CatunaMayo-DPO-details | Eric111__CatunaMayo-DPO__leaderboard_bbh_boolean_expressions | 2024_07_22T09_46_39.319670 | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/Eric111__CatunaMayo-DPO-details | Eric111__CatunaMayo-DPO__leaderboard_bbh_boolean_expressions | latest | 250 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/Eric111__CatunaMayo-DPO-details | Eric111__CatunaMayo-DPO__leaderboard_bbh_boolean_expressions | latest | 250 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [250]} | false |
| open-llm-leaderboard/Eric111__CatunaMayo-DPO-details | Eric111__CatunaMayo-DPO__leaderboard_bbh_causal_judgement | 2024_07_22T09_46_39.319670 | 187 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [187]} | false |
| open-llm-leaderboard/Eric111__CatunaMayo-DPO-details | Eric111__CatunaMayo-DPO__leaderboard_bbh_causal_judgement | 2024_07_22T09_46_39.319670 | 187 | prompt_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [187]} | false |
| open-llm-leaderboard/Eric111__CatunaMayo-DPO-details | Eric111__CatunaMayo-DPO__leaderboard_bbh_causal_judgement | latest | 187 | doc_hash | 0 | 0 | 64 | 64 | 64 | 64 | 0 | {"bin_edges": [64, 64], "hist": [187]} | false |
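The `histogram` field in every row is a small dict with `bin_edges` and `hist`, where `bin_edges` has one more entry than `hist`. Below is a short sketch for unpacking one such cell; the convention that the final bin is closed on the right (so it also covers the column maximum) is inferred from the degenerate rows like {"bin_edges": [64, 64], "hist": [256]} and is an assumption about the pipeline that produced these statistics.

```python
import json

# One `histogram` cell copied from the table above
# (gpt2__leaderboard_musr_team_allocation, column `target`).
row_histogram = '{"bin_edges": [37, 43, 49, 55, 61, 67, 73, 79, 85, 89], "hist": [17, 30, 43, 54, 52, 29, 12, 9, 4]}'

h = json.loads(row_histogram)
edges, counts = h["bin_edges"], h["hist"]
assert len(edges) == len(counts) + 1  # n bins need n + 1 edges

for i, (lo, hi, n) in enumerate(zip(edges[:-1], edges[1:], counts)):
    # Assumed convention: half-open bins, except the last one, which also
    # includes the column maximum.
    bracket = "]" if i == len(counts) - 1 else ")"
    print(f"[{lo}, {hi}{bracket}: {n}")
```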