Per-column string-length statistics, one row per (dataset, config, split, column_name) combination. Field types and the ranges observed across all rows:

| field | dtype | observed values |
| --- | --- | --- |
| dataset | string | lengths 5–115 |
| config | string | lengths 1–162 |
| split | string | lengths 1–228 |
| num_examples | int64 | 3–341M |
| column_name | string | lengths 0–77.9k |
| null_count | int64 | 0–62.9M |
| null_proportion | float64 | 0–1 |
| min | int64 | 0–9.25M |
| max | int64 | 0–1.07B |
| mean | float64 | 0–90.4M |
| median | float64 | 0–80.1M |
| std | float64 | 0–130M |
| histogram | dict | { "bin_edges": [...], "hist": [...] } |
| partial | bool | 2 classes (false/true) |
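The statistics in each row can be reproduced, at least approximately, from the underlying dataset. Below is a minimal sketch, assuming each (dataset, config, split) is loadable with the `datasets` library; it is not the exact pipeline that produced the table (in particular, the integer rounding of histogram bin edges and the degrees-of-freedom convention behind `std` are guesses).

```python
# Minimal sketch (assumed, not the exact pipeline behind this table): recompute
# the string-length statistics reported below for one dataset column.
import numpy as np
from datasets import load_dataset

def string_length_stats(dataset: str, config: str, split: str, column: str, bins: int = 10) -> dict:
    ds = load_dataset(dataset, config, split=split)
    values = ds[column]
    null_count = sum(v is None for v in values)
    lengths = np.array([len(v) for v in values if v is not None])
    # The table's histograms use integer bin edges; np.histogram returns float edges.
    hist, bin_edges = np.histogram(lengths, bins=bins)
    return {
        "num_examples": len(values),
        "null_count": null_count,
        "null_proportion": null_count / len(values),
        "min": int(lengths.min()),
        "max": int(lengths.max()),
        "mean": round(float(lengths.mean()), 5),
        "median": float(np.median(lengths)),
        "std": round(float(lengths.std(ddof=1)), 5),  # ddof=1 is an assumption
        "histogram": {"bin_edges": bin_edges.tolist(), "hist": hist.tolist()},
    }

# Example: the luozhouyang/dureader robust/train "question" row below reports
# min=2, max=42, mean=9.25847, median=9, std=2.77124.
print(string_length_stats("luozhouyang/dureader", "robust", "train", "question"))
```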
| dataset | config | split | num_examples | column_name | null_count | null_proportion | min | max | mean | median | std | histogram | partial |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| clips/mfaq | no_flat | train | 36,857 | question | 0 | 0 | 11 | 180 | 62.43875 | 61 | 20.93602 | { "bin_edges": [ 11, 28, 45, 62, 79, 96, 113, 130, 147, 164, 180 ], "hist": [ 889, 6521, 11506, 9863, 5847, 1714, 314, 119, 64, 20 ] } | false |
| clips/mfaq | pl | train | 10,572 | domain | 0 | 0 | 5 | 32 | 12.14453 | 12 | 4.14676 | { "bin_edges": [ 5, 8, 11, 14, 17, 20, 23, 26, 29, 32, 32 ], "hist": [ 1745, 2379, 2679, 2663, 459, 174, 455, 2, 13, 3 ] } | false |
| clips/mfaq | pl | validation | 123 | domain | 0 | 0 | 5 | 36 | 14.31707 | 14 | 4.79124 | { "bin_edges": [ 5, 9, 13, 17, 21, 25, 29, 33, 36 ], "hist": [ 8, 43, 41, 16, 11, 3, 0, 1 ] } | false |
| huggingartists/yung-plague | default | train | 38 | text | 0 | 0 | 917 | 4,678 | 2,092.15789 | 1,995 | 771.24587 | { "bin_edges": [ 917, 1294, 1671, 2048, 2425, 2802, 3179, 3556, 3933, 4310, 4678 ], "hist": [ 4, 9, 8, 6, 7, 2, 0, 0, 1, 1 ] } | false |
| huggingartists/zemfira | default | train | 165 | text | 0 | 0 | 0 | 2,651 | 661.85455 | 666 | 413.65284 | { "bin_edges": [ 0, 266, 532, 798, 1064, 1330, 1596, 1862, 2128, 2394, 2651 ], "hist": [ 23, 40, 49, 35, 10, 4, 2, 0, 1, 1 ] } | false |
| luozhouyang/dureader | checklist | train | 2,999 | context | 0 | 0 | 50 | 996 | 220.16872 | 157 | 164.83648 | { "bin_edges": [ 50, 145, 240, 335, 430, 525, 620, 715, 810, 905, 996 ], "hist": [ 1327, 828, 302, 212, 132, 77, 43, 33, 28, 17 ] } | false |
| luozhouyang/dureader | checklist | train | 2,999 | id | 0 | 0 | 32 | 32 | 32 | 32 | 0 | { "bin_edges": [ 32, 32 ], "hist": [ 2999 ] } | false |
| luozhouyang/dureader | checklist | train | 2,999 | question | 0 | 0 | 3 | 33 | 10.21807 | 10 | 3.2946 | { "bin_edges": [ 3, 7, 11, 15, 19, 23, 27, 31, 33 ], "hist": [ 337, 1431, 943, 238, 36, 10, 3, 1 ] } | false |
| luozhouyang/dureader | checklist | train | 2,999 | title | 0 | 0 | 0 | 49 | 19.63888 | 21 | 9.96277 | { "bin_edges": [ 0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 49 ], "hist": [ 375, 84, 280, 544, 719, 504, 401, 77, 12, 3 ] } | false |
| luozhouyang/dureader | checklist | validation | 1,130 | context | 0 | 0 | 36 | 995 | 207.13628 | 148 | 155.85806 | { "bin_edges": [ 36, 132, 228, 324, 420, 516, 612, 708, 804, 900, 995 ], "hist": [ 468, 349, 111, 92, 50, 19, 13, 14, 12, 2 ] } | false |
| luozhouyang/dureader | checklist | validation | 1,130 | id | 0 | 0 | 32 | 32 | 32 | 32 | 0 | { "bin_edges": [ 32, 32 ], "hist": [ 1130 ] } | false |
| luozhouyang/dureader | checklist | validation | 1,130 | question | 0 | 0 | 3 | 28 | 10.06195 | 10 | 3.19289 | { "bin_edges": [ 3, 6, 9, 12, 15, 18, 21, 24, 27, 28 ], "hist": [ 63, 287, 473, 213, 64, 18, 10, 0, 2 ] } | false |
| luozhouyang/dureader | checklist | validation | 1,130 | title | 0 | 0 | 0 | 54 | 18.6292 | 20 | 10.03323 | { "bin_edges": [ 0, 6, 12, 18, 24, 30, 36, 42, 48, 54, 54 ], "hist": [ 143, 119, 206, 289, 203, 148, 19, 2, 0, 1 ] } | false |
| luozhouyang/dureader | robust | train | 14,520 | context | 0 | 0 | 100 | 1,881 | 282.29697 | 212 | 193.20486 | { "bin_edges": [ 100, 279, 458, 637, 816, 995, 1174, 1353, 1532, 1711, 1881 ], "hist": [ 9205, 3092, 1181, 619, 404, 7, 6, 2, 2, 2 ] } | false |
| luozhouyang/dureader | robust | train | 14,520 | id | 0 | 0 | 32 | 32 | 32 | 32 | 0 | { "bin_edges": [ 32, 32 ], "hist": [ 14520 ] } | false |
| luozhouyang/dureader | robust | train | 14,520 | question | 0 | 0 | 2 | 42 | 9.25847 | 9 | 2.77124 | { "bin_edges": [ 2, 7, 12, 17, 22, 27, 32, 37, 42, 42 ], "hist": [ 1842, 10353, 2039, 242, 33, 8, 2, 0, 1 ] } | false |
| luozhouyang/dureader | robust | validation | 1,417 | context | 0 | 0 | 100 | 1,347 | 284.28299 | 226 | 187.53389 | { "bin_edges": [ 100, 225, 350, 475, 600, 725, 850, 975, 1100, 1225, 1347 ], "hist": [ 704, 339, 179, 81, 56, 31, 22, 3, 1, 1 ] } | false |
| luozhouyang/dureader | robust | validation | 1,417 | id | 0 | 0 | 32 | 32 | 32 | 32 | 0 | { "bin_edges": [ 32, 32 ], "hist": [ 1417 ] } | false |
| luozhouyang/dureader | robust | validation | 1,417 | question | 0 | 0 | 3 | 24 | 9.41708 | 9 | 2.64875 | { "bin_edges": [ 3, 6, 9, 12, 15, 18, 21, 24, 24 ], "hist": [ 71, 439, 667, 176, 48, 14, 1, 1 ] } | false |
| luozhouyang/dureader | robust | test | 50,000 | context | 0 | 0 | 11 | 5,020 | 327.64724 | 245 | 266.32963 | { "bin_edges": [ 11, 512, 1013, 1514, 2015, 2516, 3017, 3518, 4019, 4520, 5020 ], "hist": [ 40585, 8470, 672, 257, 7, 1, 3, 1, 2, 2 ] } | false |
| luozhouyang/dureader | robust | test | 50,000 | id | 0 | 0 | 32 | 32 | 32 | 32 | 0 | { "bin_edges": [ 32, 32 ], "hist": [ 50000 ] } | false |
| luozhouyang/dureader | robust | test | 50,000 | question | 0 | 0 | 3 | 72 | 10.29264 | 10 | 4.17668 | { "bin_edges": [ 3, 10, 17, 24, 31, 38, 45, 52, 59, 66, 72 ], "hist": [ 24442, 22574, 2146, 515, 237, 51, 16, 17, 1, 1 ] } | false |
| luozhouyang/dureader | checklist | test | 49,992 | context | 0 | 0 | 1 | 2,612 | 183.19017 | 146 | 137.62976 | { "bin_edges": [ 1, 263, 525, 787, 1049, 1311, 1573, 1835, 2097, 2359, 2612 ], "hist": [ 39463, 10046, 248, 103, 32, 30, 27, 14, 11, 18 ] } | false |
| luozhouyang/dureader | checklist | test | 49,992 | id | 0 | 0 | 32 | 32 | 32 | 32 | 0 | { "bin_edges": [ 32, 32 ], "hist": [ 49992 ] } | false |
| luozhouyang/dureader | checklist | test | 49,992 | question | 0 | 0 | 2 | 44 | 10.20271 | 10 | 3.30622 | { "bin_edges": [ 2, 7, 12, 17, 22, 27, 32, 37, 42, 44 ], "hist": [ 4914, 31043, 11989, 1661, 282, 47, 36, 18, 2 ] } | false |
| luozhouyang/dureader | checklist | test | 49,992 | title | 0 | 0 | 0 | 94 | 14.65526 | 17 | 11.6799 | { "bin_edges": [ 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 94 ], "hist": [ 17419, 12521, 14811, 5034, 177, 22, 4, 2, 0, 2 ] } | false |
| masked-neuron/amazon | clothing_majorshift02 | train | 33,471 | date | 0 | 0 | 10 | 10 | 10 | 10 | 0 | { "bin_edges": [ 10, 10 ], "hist": [ 33471 ] } | false |
| masked-neuron/amazon | clothing_majorshift02 | train | 33,471 | text | 0 | 0 | 2 | 11,110 | 400.28971 | 280 | 425.80979 | { "bin_edges": [ 2, 1113, 2224, 3335, 4446, 5557, 6668, 7779, 8890, 10001, 11110 ], "hist": [ 31848, 1367, 187, 40, 12, 6, 4, 3, 1, 3 ] } | false |
| tasksource/crowdflower | airline-sentiment | train | 14,640 | text | 0 | 0 | 2 | 229 | 103.44925 | 114 | 36.5413 | { "bin_edges": [ 2, 25, 48, 71, 94, 117, 140, 163, 186, 209, 229 ], "hist": [ 376, 1168, 1618, 2076, 2405, 5146, 1835, 15, 0, 1 ] } | false |
| tasksource/crowdflower | political-media-bias | train | 4,999 | text | 0 | 0 | 1 | 14,668 | 199.77616 | 137 | 300.99921 | { "bin_edges": [ 1, 1468, 2935, 4402, 5869, 7336, 8803, 10270, 11737, 13204, 14668 ], "hist": [ 4980, 16, 1, 0, 1, 0, 0, 0, 0, 1 ] } | false |
| tasksource/crowdflower | political-media-message | train | 4,999 | text | 0 | 0 | 1 | 14,668 | 199.77616 | 137 | 300.99921 | { "bin_edges": [ 1, 1468, 2935, 4402, 5869, 7336, 8803, 10270, 11737, 13204, 14668 ], "hist": [ 4980, 16, 1, 0, 1, 0, 0, 0, 0, 1 ] } | false |
| tasksource/crowdflower | corporate-messaging | train | 3,118 | text | 0 | 0 | 22 | 159 | 120.71456 | 129 | 22.44989 | { "bin_edges": [ 22, 36, 50, 64, 78, 92, 106, 120, 134, 148, 159 ], "hist": [ 10, 22, 35, 117, 192, 317, 440, 721, 1241, 23 ] } | false |
| tasksource/crowdflower | political-media-audience | train | 4,999 | text | 0 | 0 | 1 | 14,668 | 199.77616 | 137 | 300.99921 | { "bin_edges": [ 1, 1468, 2935, 4402, 5869, 7336, 8803, 10270, 11737, 13204, 14668 ], "hist": [ 4980, 16, 1, 0, 1, 0, 0, 0, 0, 1 ] } | false |
| tasksource/crowdflower | economic-news | train | 7,606 | text | 0 | 0 | 112 | 5,311 | 1,405.8254 | 1,334 | 478.21248 | { "bin_edges": [ 112, 632, 1152, 1672, 2192, 2712, 3232, 3752, 4272, 4792, 5311 ], "hist": [ 123, 2175, 3691, 1161, 303, 93, 45, 11, 2, 2 ] } | false |
| tasksource/crowdflower | text_emotion | train | 39,998 | text | 0 | 0 | 1 | 145 | 73.07938 | 68 | 36.26267 | { "bin_edges": [ 1, 16, 31, 46, 61, 76, 91, 106, 121, 136, 145 ], "hist": [ 804, 4274, 5987, 6036, 5285, 4455, 3802, 3395, 4175, 1785 ] } | false |
| metamong1/summarization_optimization | Summarization Part Data | train | 73,392 | doc_id | 0 | 0 | 6 | 15 | 9.9546 | 9 | 1.86649 | { "bin_edges": [ 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 15 ], "hist": [ 516, 0, 870, 55017, 0, 0, 0, 14728, 0, 2261 ] } | false |
| metamong1/summarization_optimization | Summarization Part Data | train | 73,392 | text | 0 | 0 | 5 | 4,326 | 961.24628 | 919 | 326.23638 | { "bin_edges": [ 5, 438, 871, 1304, 1737, 2170, 2603, 3036, 3469, 3902, 4326 ], "hist": [ 2823, 29600, 29932, 9714, 1293, 19, 6, 2, 1, 2 ] } | false |
| metamong1/summarization_optimization | Summarization Part Data | train | 73,392 | title | 0 | 0 | 1 | 257 | 29.69409 | 28 | 10.7345 | { "bin_edges": [ 1, 27, 53, 79, 105, 131, 157, 183, 209, 235, 257 ], "hist": [ 31217, 39765, 2182, 171, 28, 16, 7, 3, 1, 2 ] } | false |
| tasksource/crowdflower | sentiment_nuclear_power | train | 190 | text | 0 | 0 | 37 | 152 | 110.28947 | 116.5 | 26.65824 | { "bin_edges": [ 37, 49, 61, 73, 85, 97, 109, 121, 133, 145, 152 ], "hist": [ 3, 4, 13, 17, 24, 21, 27, 29, 39, 13 ] } | false |
| tasksource/crowdflower | tweet_global_warming | train | 4,223 | text | 0 | 0 | 21 | 161 | 112.08927 | 120 | 27.22438 | { "bin_edges": [ 21, 36, 51, 66, 81, 96, 111, 126, 141, 156, 161 ], "hist": [ 15, 70, 210, 391, 505, 501, 783, 1559, 164, 25 ] } | false |
| metamong1/summarization_optimization | Summarization Part Data | validation | 18,348 | doc_id | 0 | 0 | 6 | 15 | 9.96185 | 9 | 1.87244 | { "bin_edges": [ 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 15 ], "hist": [ 123, 0, 221, 13737, 0, 0, 0, 3682, 0, 585 ] } | false |
| metamong1/summarization_optimization | Summarization Part Data | validation | 18,348 | text | 0 | 0 | 63 | 3,209 | 962.87366 | 920 | 327.64934 | { "bin_edges": [ 63, 378, 693, 1008, 1323, 1638, 1953, 2268, 2583, 2898, 3209 ], "hist": [ 394, 3184, 7358, 4706, 2117, 570, 11, 5, 1, 2 ] } | false |
| metamong1/summarization_optimization | Summarization Part Data | validation | 18,348 | title | 0 | 0 | 1 | 234 | 29.59647 | 28 | 10.72399 | { "bin_edges": [ 1, 25, 49, 73, 97, 121, 145, 169, 193, 217, 234 ], "hist": [ 6104, 11331, 830, 65, 8, 6, 1, 1, 1, 1 ] } | false |
| lhoestq/wikipedia_bn | default | train | 167,786 | text | 0 | 0 | 11 | 125,666 | 1,487.62664 | 561 | 3,222.10607 | { "bin_edges": [ 11, 12577, 25143, 37709, 50275, 62841, 75407, 87973, 100539, 113105, 125666 ], "hist": [ 165756, 1505, 343, 106, 46, 14, 7, 5, 3, 1 ] } | false |
| lhoestq/wikipedia_bn | default | train | 167,786 | title | 0 | 0 | 1 | 182 | 17.53488 | 15 | 9.68099 | { "bin_edges": [ 1, 20, 39, 58, 77, 96, 115, 134, 153, 172, 182 ], "hist": [ 113384, 47878, 5739, 713, 68, 1, 0, 2, 0, 1 ] } | false |
| gcaillaut/pubmed | nodes | train | 19,717 | node | 0 | 0 | 4 | 8 | 7.53172 | 8 | 0.52833 | { "bin_edges": [ 4, 5, 6, 7, 8, 8 ], "hist": [ 1, 21, 228, 8710, 10757 ] } | false |
| metaeval/ethics | commonsense | validation | 3,885 | text | 0 | 0 | 14 | 5,429 | 794.54183 | 93 | 945.8661 | { "bin_edges": [ 14, 556, 1098, 1640, 2182, 2724, 3266, 3808, 4350, 4892, 5429 ], "hist": [ 2157, 439, 475, 355, 240, 213, 3, 1, 0, 2 ] } | false |
| metaeval/ethics | commonsense | test | 3,964 | text | 0 | 0 | 18 | 7,377 | 956.85595 | 762.5 | 959.97009 | { "bin_edges": [ 18, 754, 1490, 2226, 2962, 3698, 4434, 5170, 5906, 6642, 7377 ], "hist": [ 1972, 828, 659, 361, 137, 6, 0, 0, 0, 1 ] } | false |
| metaeval/ethics | deontology | test | 3,536 | text | 0 | 0 | 30 | 106 | 45.94457 | 44 | 11.42965 | { "bin_edges": [ 30, 38, 46, 54, 62, 70, 78, 86, 94, 102, 106 ], "hist": [ 844, 1164, 812, 360, 192, 100, 48, 12, 0, 4 ] } | false |
| metaeval/ethics | virtue | validation | 4,975 | sentence1 | 0 | 0 | 37 | 168 | 71.19899 | 68 | 19.16262 | { "bin_edges": [ 37, 51, 65, 79, 93, 107, 121, 135, 149, 163, 168 ], "hist": [ 555, 1585, 1270, 980, 315, 185, 45, 20, 15, 5 ] } | false |
| metaeval/ethics | virtue | validation | 4,975 | sentence2 | 0 | 0 | 4 | 113 | 9.69166 | 9 | 2.99608 | { "bin_edges": [ 4, 15, 26, 37, 48, 59, 70, 81, 92, 103, 113 ], "hist": [ 4800, 174, 0, 0, 0, 0, 0, 0, 0, 1 ] } | false |
| metaeval/ethics | justice | train | 21,791 | text | 0 | 0 | 49 | 414 | 103.10532 | 101 | 30.06768 | { "bin_edges": [ 49, 86, 123, 160, 197, 234, 271, 308, 345, 382, 414 ], "hist": [ 6312, 10601, 4036, 660, 121, 41, 11, 6, 1, 2 ] } | false |
| metaeval/ethics | virtue | test | 4,780 | sentence1 | 0 | 0 | 40 | 154 | 71.99791 | 68 | 19.47877 | { "bin_edges": [ 40, 52, 64, 76, 88, 100, 112, 124, 136, 148, 154 ], "hist": [ 600, 1220, 1185, 865, 445, 275, 100, 50, 30, 10 ] } | false |
| metaeval/ethics | virtue | test | 4,780 | sentence2 | 0 | 0 | 4 | 70 | 9.63515 | 9 | 2.73269 | { "bin_edges": [ 4, 11, 18, 25, 32, 39, 46, 53, 60, 67, 70 ], "hist": [ 3162, 1585, 32, 0, 0, 0, 0, 0, 0, 1 ] } | false |
| metaeval/ethics | commonsense | train | 13,910 | text | 0 | 0 | 10 | 12,168 | 1,018.97383 | 664 | 1,136.65398 | { "bin_edges": [ 10, 1226, 2442, 3658, 4874, 6090, 7306, 8522, 9738, 10954, 12168 ], "hist": [ 8523, 3494, 1650, 194, 34, 9, 1, 2, 2, 1 ] } | false |
| metaeval/ethics | deontology | train | 18,164 | text | 0 | 0 | 29 | 131 | 46.28964 | 44 | 11.15653 | { "bin_edges": [ 29, 40, 51, 62, 73, 84, 95, 106, 117, 128, 131 ], "hist": [ 5612, 7137, 3720, 1199, 377, 97, 16, 1, 0, 5 ] } | false |
| metaeval/ethics | deontology | validation | 3,596 | text | 0 | 0 | 29 | 93 | 46.05784 | 44 | 10.55737 | { "bin_edges": [ 29, 36, 43, 50, 57, 64, 71, 78, 85, 92, 93 ], "hist": [ 572, 972, 916, 584, 312, 136, 64, 28, 8, 4 ] } | false |
| metaeval/ethics | justice | validation | 2,704 | text | 0 | 0 | 42 | 253 | 102.10577 | 100 | 30.30749 | { "bin_edges": [ 42, 64, 86, 108, 130, 152, 174, 196, 218, 240, 253 ], "hist": [ 245, 614, 754, 630, 304, 98, 41, 10, 5, 3 ] } | false |
| metaeval/ethics | justice | test | 2,052 | text | 0 | 0 | 50 | 262 | 103.05604 | 101 | 29.85207 | { "bin_edges": [ 50, 72, 94, 116, 138, 160, 182, 204, 226, 248, 262 ], "hist": [ 293, 511, 616, 389, 166, 52, 9, 10, 2, 4 ] } | false |
| metaeval/ethics | virtue | train | 28,245 | sentence1 | 0 | 0 | 40 | 168 | 69.48178 | 67 | 18.11906 | { "bin_edges": [ 40, 53, 66, 79, 92, 105, 118, 131, 144, 157, 168 ], "hist": [ 4966, 8443, 7356, 4229, 1983, 729, 345, 137, 47, 10 ] } | false |
| metaeval/ethics | virtue | train | 28,245 | sentence2 | 0 | 0 | 4 | 31 | 9.59511 | 9 | 2.579 | { "bin_edges": [ 4, 7, 10, 13, 16, 19, 22, 25, 28, 31, 31 ], "hist": [ 3037, 11527, 10343, 2838, 356, 80, 63, 0, 0, 1 ] } | false |
| GEM/totto | totto | challenge_train_sample | 500 | example_id | 0 | 0 | 18 | 20 | 19.396 | 19 | 0.60656 | { "bin_edges": [ 18, 19, 20, 20 ], "hist": [ 32, 238, 230 ] } | false |
| GEM/totto | totto | challenge_train_sample | 500 | gem_id | 0 | 0 | 30 | 32 | 31.78 | 32 | 0.46046 | { "bin_edges": [ 30, 31, 32, 32 ], "hist": [ 10, 90, 400 ] } | false |
| GEM/totto | totto | challenge_train_sample | 500 | gem_parent_id | 0 | 0 | 15 | 18 | 17.076 | 17 | 0.4803 | { "bin_edges": [ 15, 16, 17, 18, 18 ], "hist": [ 4, 28, 394, 74 ] } | false |
| GEM/totto | totto | challenge_train_sample | 500 | linearized_input | 0 | 0 | 97 | 4,938 | 469.992 | 350 | 438.52665 | { "bin_edges": [ 97, 582, 1067, 1552, 2037, 2522, 3007, 3492, 3977, 4462, 4938 ], "hist": [ 402, 70, 16, 4, 6, 0, 0, 0, 0, 2 ] } | false |
| GEM/totto | totto | challenge_train_sample | 500 | table_page_title | 0 | 0 | 5 | 63 | 23.296 | 20 | 13.14115 | { "bin_edges": [ 5, 11, 17, 23, 29, 35, 41, 47, 53, 59, 63 ], "hist": [ 50, 166, 72, 54, 58, 46, 16, 22, 8, 8 ] } | false |
| GEM/totto | totto | challenge_train_sample | 500 | table_section_text | 0 | 0 | 0 | 444 | 37.512 | 0 | 70.87686 | { "bin_edges": [ 0, 45, 90, 135, 180, 225, 270, 315, 360, 405, 444 ], "hist": [ 372, 40, 24, 38, 12, 10, 0, 2, 0, 2 ] } | false |
| GEM/totto | totto | challenge_train_sample | 500 | table_section_title | 0 | 0 | 0 | 87 | 14.612 | 12 | 10.84291 | { "bin_edges": [ 0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 87 ], "hist": [ 144, 216, 92, 28, 6, 8, 4, 0, 0, 2 ] } | false |
| GEM/totto | totto | challenge_train_sample | 500 | table_webpage_url | 0 | 0 | 34 | 100 | 52.904 | 49 | 13.47568 | { "bin_edges": [ 34, 41, 48, 55, 62, 69, 76, 83, 90, 97, 100 ], "hist": [ 94, 132, 70, 82, 62, 20, 22, 8, 8, 2 ] } | false |
| GEM/totto | totto | challenge_train_sample | 500 | target | 0 | 0 | 27 | 268 | 86.028 | 78 | 37.045 | { "bin_edges": [ 27, 52, 77, 102, 127, 152, 177, 202, 227, 252, 268 ], "hist": [ 60, 188, 120, 68, 32, 20, 4, 6, 0, 2 ] } | false |
| ghomasHudson/muld | AO3 Style Change Detection | validation | 705 | input | 0 | 0 | 55,658 | 177,032 | 111,886.68085 | 112,520 | 31,645.0556 | { "bin_edges": [ 55658, 67796, 79934, 92072, 104210, 116348, 128486, 140624, 152762, 164900, 177032 ], "hist": [ 69, 74, 73, 88, 75, 87, 75, 75, 70, 19 ] } | false |
| ghomasHudson/muld | AO3 Style Change Detection | validation | 705 | metadata | 0 | 0 | 965 | 6,644 | 3,242.94894 | 3,126 | 1,028.48975 | { "bin_edges": [ 965, 1533, 2101, 2669, 3237, 3805, 4373, 4941, 5509, 6077, 6644 ], "hist": [ 14, 79, 146, 138, 117, 99, 73, 29, 4, 6 ] } | false |
| ghomasHudson/muld | AO3 Style Change Detection | test | 2,352 | input | 0 | 0 | 51,255 | 220,019 | 113,480.76276 | 112,657.5 | 32,702.9141 | { "bin_edges": [ 51255, 68132, 85009, 101886, 118763, 135640, 152517, 169394, 186271, 203148, 220019 ], "hist": [ 220, 354, 367, 360, 343, 337, 335, 35, 0, 1 ] } | false |
| ghomasHudson/muld | AO3 Style Change Detection | test | 2,352 | metadata | 0 | 0 | 186 | 7,587 | 3,353.70238 | 3,294 | 1,084.68334 | { "bin_edges": [ 186, 927, 1668, 2409, 3150, 3891, 4632, 5373, 6114, 6855, 7587 ], "hist": [ 6, 69, 455, 549, 526, 429, 239, 61, 14, 4 ] } | false |
| ghomasHudson/muld | Character Archetype Classification | validation | 166 | input | 0 | 0 | 77,571 | 461,523 | 216,105.14458 | 204,374 | 61,005.70961 | { "bin_edges": [ 77571, 115967, 154363, 192759, 231155, 269551, 307947, 346343, 384739, 423135, 461523 ], "hist": [ 2, 19, 57, 25, 25, 24, 12, 1, 0, 1 ] } | false |
| ghomasHudson/muld | AO3 Style Change Detection | train | 6,354 | input | 0 | 0 | 54,805 | 189,438 | 113,190.46317 | 113,257 | 32,810.1252 | { "bin_edges": [ 54805, 68269, 81733, 95197, 108661, 122125, 135589, 149053, 162517, 175981, 189438 ], "hist": [ 676, 718, 790, 759, 740, 767, 701, 805, 394, 4 ] } | false |
| ghomasHudson/muld | AO3 Style Change Detection | train | 6,354 | metadata | 0 | 0 | 674 | 7,251 | 3,276.67642 | 3,207.5 | 1,048.89393 | { "bin_edges": [ 674, 1332, 1990, 2648, 3306, 3964, 4622, 5280, 5938, 6596, 7251 ], "hist": [ 55, 652, 1307, 1339, 1302, 962, 531, 171, 32, 3 ] } | false |
| ghomasHudson/muld | Character Archetype Classification | train | 1,256 | input | 0 | 0 | 9 | 546,294 | 212,135.41083 | 210,874.5 | 61,097.58216 | { "bin_edges": [ 9, 54638, 109267, 163896, 218525, 273154, 327783, 382412, 437041, 491670, 546294 ], "hist": [ 9, 41, 199, 455, 382, 139, 21, 4, 4, 2 ] } | false |
| ghomasHudson/muld | Character Archetype Classification | test | 86 | input | 0 | 0 | 132,444 | 475,666 | 228,646.4186 | 229,529 | 65,440.02423 | { "bin_edges": [ 132444, 166767, 201090, 235413, 269736, 304059, 338382, 372705, 407028, 441351, 475666 ], "hist": [ 17, 16, 14, 17, 16, 4, 0, 0, 0, 2 ] } | false |
| ghomasHudson/muld | HotpotQA | train | 52,000 | input | 0 | 0 | 3,706 | 1,300,657 | 96,566.50767 | 82,596 | 43,097.49585 | { "bin_edges": [ 3706, 133402, 263098, 392794, 522490, 652186, 781882, 911578, 1041274, 1170970, 1300657 ], "hist": [ 44906, 6619, 415, 42, 13, 0, 1, 0, 1, 3 ] } | true |
| ghomasHudson/muld | HotpotQA | validation | 7,405 | input | 0 | 0 | 58,124 | 1,054,875 | 92,887.33288 | 77,402 | 43,741.57847 | { "bin_edges": [ 58124, 157800, 257476, 357152, 456828, 556504, 656180, 755856, 855532, 955208, 1054875 ], "hist": [ 6885, 427, 77, 12, 3, 0, 0, 0, 0, 1 ] } | true |
| ghomasHudson/muld | NarrativeQA | validation | 3,373 | input | 0 | 0 | 59,850 | 1,961,070 | 348,127.58138 | 235,190 | 330,217.53388 | { "bin_edges": [ 59850, 249973, 440096, 630219, 820342, 1010465, 1200588, 1390711, 1580834, 1770957, 1961070 ], "hist": [ 1986, 749, 204, 89, 167, 89, 29, 0, 0, 60 ] } | true |
| ghomasHudson/muld | NarrativeQA | test | 10,143 | input | 0 | 0 | 54,097 | 1,874,178 | 338,103.91078 | 257,618 | 250,132.46623 | { "bin_edges": [ 54097, 236106, 418115, 600124, 782133, 964142, 1146151, 1328160, 1510169, 1692178, 1874178 ], "hist": [ 4027, 3891, 1160, 383, 304, 176, 114, 30, 29, 29 ] } | true |
| ghomasHudson/muld | NarrativeQA | train | 15,000 | input | 0 | 0 | 19,729 | 2,027,922 | 344,400.55267 | 249,014 | 310,961.89896 | { "bin_edges": [ 19729, 220549, 421369, 622189, 823009, 1023829, 1224649, 1425469, 1626289, 1827109, 2027922 ], "hist": [ 5881, 6185, 1023, 788, 359, 373, 67, 88, 89, 147 ] } | true |
| ghomasHudson/muld | OpenSubtitles | train | 27,749 | input | 0 | 0 | 67 | 56,794 | 24,006.08508 | 22,620 | 9,674.84833 | { "bin_edges": [ 67, 5740, 11413, 17086, 22759, 28432, 34105, 39778, 45451, 51124, 56794 ], "hist": [ 263, 1299, 5501, 6998, 6205, 3326, 1848, 1298, 817, 194 ] } | false |
| ghomasHudson/muld | VLSP | test | 478 | input | 0 | 0 | 59,911 | 744,287 | 214,618.66736 | 194,469 | 109,966.6018 | { "bin_edges": [ 59911, 128349, 196787, 265225, 333663, 402101, 470539, 538977, 607415, 675853, 744287 ], "hist": [ 116, 125, 100, 73, 38, 16, 4, 2, 3, 1 ] } | false |
| ghomasHudson/muld | OpenSubtitles | test | 1,385 | input | 0 | 0 | 46,530 | 180,655 | 63,600.16029 | 60,467 | 11,684.08356 | { "bin_edges": [ 46530, 59943, 73356, 86769, 100182, 113595, 127008, 140421, 153834, 167247, 180655 ], "hist": [ 664, 513, 153, 39, 7, 3, 3, 1, 1, 1 ] } | false |
| ghomasHudson/muld | OpenSubtitles | test | 1,385 | metadata | 0 | 0 | 2 | 2,971 | 128.67942 | 2 | 365.77292 | { "bin_edges": [ 2, 299, 596, 893, 1190, 1487, 1784, 2081, 2378, 2675, 2971 ], "hist": [ 1272, 19, 19, 23, 19, 16, 9, 3, 3, 2 ] } | false |
| lbox/lbox_open | precedent_corpus | train | 150,000 | precedent | 0 | 0 | 0 | 904,103 | 3,307.15862 | 2,027 | 6,242.02804 | { "bin_edges": [ 0, 90411, 180822, 271233, 361644, 452055, 542466, 632877, 723288, 813699, 904103 ], "hist": [ 149924, 61, 7, 4, 1, 1, 1, 0, 0, 1 ] } | false |
| GEM/totto | totto | test | 7,700 | example_id | 0 | 0 | 16 | 20 | 19.38623 | 19 | 0.61494 | { "bin_edges": [ 16, 17, 18, 19, 20, 20 ], "hist": [ 3, 37, 414, 3775, 3471 ] } | false |
| GEM/totto | totto | test | 7,700 | gem_id | 0 | 0 | 12 | 15 | 14.85584 | 15 | 0.39315 | { "bin_edges": [ 12, 13, 14, 15, 15 ], "hist": [ 10, 90, 900, 6700 ] } | false |
| GEM/totto | totto | test | 7,700 | gem_parent_id | 0 | 0 | 12 | 15 | 14.85584 | 15 | 0.39315 | { "bin_edges": [ 12, 13, 14, 15, 15 ], "hist": [ 10, 90, 900, 6700 ] } | false |
| GEM/totto | totto | test | 7,700 | linearized_input | 0 | 0 | 73 | 245,337 | 533.04896 | 335 | 3,112.57844 | { "bin_edges": [ 73, 24600, 49127, 73654, 98181, 122708, 147235, 171762, 196289, 220816, 245337 ], "hist": [ 7694, 4, 0, 1, 0, 0, 0, 0, 0, 1 ] } | false |
| GEM/totto | totto | test | 7,700 | table_page_title | 0 | 0 | 3 | 101 | 25.06961 | 22 | 14.07483 | { "bin_edges": [ 3, 13, 23, 33, 43, 53, 63, 73, 83, 93, 101 ], "hist": [ 1346, 2667, 1708, 1049, 531, 248, 111, 35, 3, 2 ] } | false |
| GEM/totto | totto | test | 7,700 | table_section_text | 0 | 0 | 0 | 1,224 | 40.85961 | 0 | 87.1279 | { "bin_edges": [ 0, 123, 246, 369, 492, 615, 738, 861, 984, 1107, 1224 ], "hist": [ 6548, 940, 141, 38, 17, 4, 2, 5, 4, 1 ] } | false |
| GEM/totto | totto | test | 7,700 | table_section_title | 0 | 0 | 0 | 111 | 14.4939 | 12 | 9.9324 | { "bin_edges": [ 0, 12, 24, 36, 48, 60, 72, 84, 96, 108, 111 ], "hist": [ 3342, 3228, 833, 199, 68, 19, 8, 2, 0, 1 ] } | false |
| GEM/totto | totto | test | 7,700 | table_webpage_url | 0 | 0 | 32 | 130 | 54.67247 | 51 | 14.57614 | { "bin_edges": [ 32, 42, 52, 62, 72, 82, 92, 102, 112, 122, 130 ], "hist": [ 1296, 2642, 1622, 1155, 546, 240, 136, 46, 15, 2 ] } | false |
| GEM/totto | totto | validation | 7,700 | example_id | 0 | 0 | 16 | 20 | 19.37429 | 19 | 0.62644 | { "bin_edges": [ 16, 17, 18, 19, 20, 20 ], "hist": [ 7, 53, 408, 3815, 3417 ] } | false |
| GEM/totto | totto | validation | 7,700 | gem_id | 0 | 0 | 18 | 21 | 20.85584 | 21 | 0.39315 | { "bin_edges": [ 18, 19, 20, 21, 21 ], "hist": [ 10, 90, 900, 6700 ] } | false |
| GEM/totto | totto | validation | 7,700 | gem_parent_id | 0 | 0 | 18 | 21 | 20.85584 | 21 | 0.39315 | { "bin_edges": [ 18, 19, 20, 21, 21 ], "hist": [ 10, 90, 900, 6700 ] } | false |
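A reading note on the `histogram` cells: `bin_edges` always has one more entry than `hist`, so `hist[i]` is the number of values falling between `bin_edges[i]` and `bin_edges[i+1]`; rows whose last two edges coincide suggest the final bucket is closed on the right. A small helper under that assumption, handy for eyeballing a distribution straight from a cell:

```python
import json

def print_histogram(cell: str, width: int = 40) -> None:
    """Render one `histogram` cell from the table above as an ASCII bar chart.
    Assumes hist[i] counts values between bin_edges[i] and bin_edges[i+1]."""
    h = json.loads(cell)
    edges, counts = h["bin_edges"], h["hist"]
    peak = max(counts) or 1  # avoid division by zero on all-empty buckets
    for lo, hi, n in zip(edges, edges[1:], counts):
        print(f"[{lo:>7}, {hi:>7}) {n:>7} " + "#" * round(width * n / peak))

# Example: the clips/mfaq no_flat/train "question" row.
print_histogram('{ "bin_edges": [ 11, 28, 45, 62, 79, 96, 113, 130, 147, 164, 180 ], '
                '"hist": [ 889, 6521, 11506, 9863, 5847, 1714, 314, 119, 64, 20 ] }')
```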